[ 502.279549] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.279956] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.279998] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.280343] env[63371]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 502.376146] env[63371]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63371) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 502.386650] env[63371]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63371) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 502.986229] env[63371]: INFO nova.virt.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 503.057056] env[63371]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 503.057281] env[63371]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 503.057402] env[63371]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63371) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 506.190484] env[63371]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-2a6237e7-0404-4ffb-ab84-c0079e70f1a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.207357] env[63371]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63371) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 506.207511] env[63371]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-37e38ba8-f740-4613-b77c-07af2d7b2dfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.240178] env[63371]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 00b12.
[ 506.240361] env[63371]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.183s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.240921] env[63371]: INFO nova.virt.vmwareapi.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] VMware vCenter version: 7.0.3
[ 506.244432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c06f24-982a-4d35-94a9-f04cbd8e2a02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.261908] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000ec047-4f09-4b25-8ea2-30d67c104316 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.268473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2123a8-6c85-4176-98db-f6c9413b1c19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.275297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e426e6e-4d62-46ce-9fed-1febe292277f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.288654] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5904f43f-838e-48d6-97b7-d923d37f9605 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.294680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef52fa3b-6160-4e31-9e1d-a8b25533b851 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.324790] env[63371]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-6636aaa4-2142-4ff1-bbe1-e579c9f234b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.329757] env[63371]: DEBUG nova.virt.vmwareapi.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Extension org.openstack.compute already exists. {{(pid=63371) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 506.332474] env[63371]: INFO nova.compute.provider_config [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 506.835850] env[63371]: DEBUG nova.context [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),2164a72f-de1f-4c51-ba7d-fa987fc9734b(cell1) {{(pid=63371) load_cells /opt/stack/nova/nova/context.py:464}}
[ 506.838083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 506.838316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 506.838979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.839421] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 506.839612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 506.840719] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.861123] env[63371]: INFO dbcounter [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Registered counter for database nova_cell0
[ 506.869439] env[63371]: INFO dbcounter [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Registered counter for database nova_cell1
[ 506.872727] env[63371]: DEBUG oslo_db.sqlalchemy.engines [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63371) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 506.873102] env[63371]: DEBUG oslo_db.sqlalchemy.engines [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63371) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 506.877813] env[63371]: ERROR nova.db.main.api [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 506.877813] env[63371]: result = function(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 506.877813] env[63371]: return func(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 506.877813] env[63371]: result = fn(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 506.877813] env[63371]: return f(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 506.877813] env[63371]: return db.service_get_minimum_version(context, binaries)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 506.877813] env[63371]: _check_db_access()
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 506.877813] env[63371]: stacktrace = ''.join(traceback.format_stack())
[ 506.877813] env[63371]:
[ 506.878801] env[63371]: ERROR nova.db.main.api [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 506.878801] env[63371]: result = function(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 506.878801] env[63371]: return func(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 506.878801] env[63371]: result = fn(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 506.878801] env[63371]: return f(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 506.878801] env[63371]: return db.service_get_minimum_version(context, binaries)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 506.878801] env[63371]: _check_db_access()
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 506.878801] env[63371]: stacktrace = ''.join(traceback.format_stack())
[ 506.878801] env[63371]:
[ 506.879332] env[63371]: WARNING nova.objects.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 506.879332] env[63371]: WARNING nova.objects.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Failed to get minimum service version for cell 2164a72f-de1f-4c51-ba7d-fa987fc9734b
[ 506.879727] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 506.879886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquired lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 506.880142] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Releasing lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 506.880492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Full set of CONF: {{(pid=63371) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 506.880640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ******************************************************************************** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 506.880768] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Configuration options gathered from: {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 506.880902] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 506.881102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 506.881233] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ================================================================================ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 506.881446] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] allow_resize_to_same_host = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.881617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] arq_binding_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.881749] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] backdoor_port = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.881874] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] backdoor_socket = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.882067] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] block_device_allocate_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.882242] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] block_device_allocate_retries_interval = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 506.882414] env[63371]: DEBUG
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cert = self.pem {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882582] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute_monitors = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882913] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_dir = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_drive_format = iso9660 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883392] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_source = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console_host = devstack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883717] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] control_exchange = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883872] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cpu_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884038] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] daemon = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884205] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] debug = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884360] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_access_ip_network_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884524] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_availability_zone = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_ephemeral_format = 
None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884832] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_green_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885076] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885242] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_schedule_zone = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885398] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] disk_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enable_new_services = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885730] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enabled_apis = ['osapi_compute'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enabled_ssl_apis = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] flat_injected = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886222] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] force_config_drive = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886377] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] force_raw_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886543] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] graceful_shutdown_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886701] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] heal_instance_info_cache_interval = 60 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886916] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] host = cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887110] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887275] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_disk_allocation_ratio = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_ram_allocation_ratio = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887653] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887816] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_build_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_delete_interval = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888154] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_format = [instance: %(uuid)s] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888322] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_name_template = instance-%08x {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888480] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_usage_audit = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888648] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_usage_audit_period = month {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888808] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888970] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instances_path = /opt/stack/data/nova/instances {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889146] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] internal_service_availability_zone = internal {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889302] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889457] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] live_migration_retry_count = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889628] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_color = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889787] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_config_append = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889949] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_dir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_options = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotate_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotate_interval_type = days {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotation_type = none {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891440] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891649] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892032] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] long_rpc_timeout = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_builds = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892309] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_live_migrations = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892520] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_snapshots = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_local_block_devices = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892899] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_logfile_count = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892962] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_logfile_size_mb = 200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893147] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] maximum_instance_delete_attempts = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893320] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_listen = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893608] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_listen_port = 8775 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893685] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893817] env[63371]: DEBUG oslo_service.service 
[None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] migrate_max_retries = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893985] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mkisofs_cmd = genisoimage {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894323] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] my_block_storage_ip = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] my_ip = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894534] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] network_allocate_retries = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894712] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894893] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_listen = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895084] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_listen_port = 8774 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895277] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_unique_server_name_scope = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895416] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] password_length = 12 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895939] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] periodic_enable = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] periodic_fuzzy_delay = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pointer_model = usbtablet {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896607] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] preallocate_images = none {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896852] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] publish_errors = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pybasedir = /opt/stack/nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897274] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ram_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_burst = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897721] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_except_level = CRITICAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897952] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898198] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reboot_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reclaim_instance_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898636] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] record = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898876] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reimage_timeout_per_gb = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] report_interval = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899300] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rescue_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899525] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_host_cpus = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899693] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_host_disk_mb = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899955] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc 
None None] reserved_host_memory_mb = 512 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_huge_pages = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resize_confirm_window = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900695] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resize_fs_using_block_device = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900754] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resume_guests_state_on_host_boot = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900986] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rpc_response_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901407] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] run_external_periodic_tasks = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901763] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_action = reap {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_poll_interval = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901992] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler_instance_sync_interval = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902440] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_down_time = 720 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902652] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] servicegroup_driver = db {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902815] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shell_completion = None {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903046] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shelved_offload_time = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903346] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shelved_poll_interval = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903465] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shutdown_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903694] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] source_is_ipv6 = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ssl_only = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] sync_power_state_interval = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] sync_power_state_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] syslog_log_facility = LOG_USER {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] tempdir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904951] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] timeout_nbd = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] transport_url = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905294] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] update_resources_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905456] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_cow_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905614] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_eventlog = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905771] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_journal = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905928] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_json = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_rootwrap_daemon = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906250] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_stderr = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906404] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_syslog = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906560] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vcpu_pin_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906726] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plugging_is_fatal = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plugging_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907104] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] virt_mkfs = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907274] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] volume_usage_poll_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907434] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] watch_log_file = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907602] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] web = /usr/share/spice-html5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907783] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.907946] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908395] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_concurrency.disable_process_locking = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909196] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909373] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909552] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909725] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909890] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910083] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.auth_strategy = keystone {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910256] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.compute_link_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910471] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910737] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.dhcp_domain = novalocal {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910988] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.enable_instance_password = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911262] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.glance_link_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912037] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_per_project_cells = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912294] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.list_records_by_skipping_down_cells = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912543] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.local_metadata_per_cell = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912797] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.max_limit = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.metadata_cache_expiration = 15 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913324] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.neutron_default_tenant_id = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913583] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.response_validation = warn {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913837] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.use_neutron_default_nets = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914362] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914611] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915159] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_targets = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915414] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_jsonfile_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915681] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915951] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend = dogpile.cache.memcached {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend_argument = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916474] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend_expiration_time = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916731] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.config_prefix = cache.oslo {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.dead_timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917239] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.debug_cache_backend = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917475] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enable_retry_client = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917721] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enable_socket_keepalive = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917973] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enabled = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918237] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enforce_fips_mode = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.expiration_time = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918732] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.hashclient_retry_attempts = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918977] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.hashclient_retry_delay = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919236] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_dead_retry = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919479] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920228] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_maxsize = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920479] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920726] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_sasl_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_socket_timeout = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_username = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.proxies = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_db = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922235] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_sentinel_service_name = mymaster {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923010] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_server = localhost:6379 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_socket_timeout = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923498] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_username = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923746] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.retry_attempts = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.retry_delay = 0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924253] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_count = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924500] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_idle = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925023] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_allowed_ciphers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925269] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925506] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925984] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926241] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926497] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926740] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926993] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.catalog_info = volumev3::publicURL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927487] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.cross_az_attach = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927978] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928237] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.endpoint_template = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928481] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.http_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928964] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.keyfile = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929228] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.os_region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929470] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929962] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930216] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.cpu_dedicated_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930468] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.cpu_shared_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930717] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.image_type_exclude_list = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931217] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.max_concurrent_disk_ops = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931463] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.max_disk_devices_to_attach = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931706] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931958] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.resource_provider_association_refresh = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932464] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932708] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.shutdown_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932969] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933243] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] conductor.workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933502] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.allowed_origins = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933745] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.ssl_ciphers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933991] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.ssl_minimum_version = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934252] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] consoleauth.enforce_session_timeout = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934501] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] consoleauth.token_ttl = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935048] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935266] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935509] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936035] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.endpoint_override = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936281] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936761] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936998] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937251] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937733] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937980] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.service_type = accelerator {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938235] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938474] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938719] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938957] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939230] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939471] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
506.939736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.backend = sqlalchemy {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940252] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_debug = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940516] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_parameters = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940768] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_recycle_time = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941019] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941268] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_inc_retry_interval = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941518] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_max_retries = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941761] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_max_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942012] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942261] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_overflow = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942505] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_pool_size = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_retries = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942996] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943249] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.mysql_wsrep_sync_wait = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943491] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.pool_timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943739] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943980] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.slave_connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944238] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.sqlite_synchronous = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944483] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.use_db_reconnect = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.backend = sqlalchemy {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945278] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_debug = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945534] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_parameters = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_recycle_time = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946039] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946296] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_inc_retry_interval = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946545] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_max_retries = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946795] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_max_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947067] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947317] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_overflow = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947564] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_pool_size = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947810] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_retries = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948073] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948319] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948558] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.pool_timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948800] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949055] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.slave_connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949303] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.sqlite_synchronous = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949556] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] devices.enabled_mdev_types = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950084] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.default_format = luks {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950339] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950585] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.api_servers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951097] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951348] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951590] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952095] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952340] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952588] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.default_trusted_certificate_ids = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952831] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.enable_certificate_validation = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953092] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.enable_rbd_download = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953336] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953579] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953818] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.keyfile = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954071] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954316] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954563] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.num_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_ceph_conf = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955075] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955356] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_pool = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955615] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_user = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955857] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956115] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956362] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956618] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.service_type = image {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956864] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957371] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957611] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.verify_glance_signatures = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958625] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] guestfs.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958869] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mks.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959333] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959596] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.manager_interval = 2400 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959850] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.precache_concurrency = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_base_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960390] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960894] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.subdirectory_name = _base {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.api_max_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961404] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.api_retry_interval = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961646] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962137] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962620] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962864] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.conductor_group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963368] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963610] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963852] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964108] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964355] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964596] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964841] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.peer_list = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965098] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965605] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.serial_console_state_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965856] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.service_type = baremetal {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966368] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.shard = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966856] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967136] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967381] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967642] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968156] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key_manager.fixed_key = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968920] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_api_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_endpoint = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969430] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_endpoint_type = public {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969672] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969914] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970164] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970427] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970675] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970922] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971178] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.number_of_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971429] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.retry_delay = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.send_service_user_token = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971917] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.split_loggers = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972169] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972417] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.verify_ssl = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972660] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.verify_ssl_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972903] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973159] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973886] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974144] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974390] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974635] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975132] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.approle_role_id = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975379] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.approle_secret_id = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975656] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_mountpoint = secret {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975901] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976155] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_version = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976396] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.namespace = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976638] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.root_token_id = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976874] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.ssl_ca_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977134] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977380] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.use_ssl = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977633] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978133] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978385] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978630] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979126] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979859] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980580] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980923] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.service_type = identity {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981104] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981269] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981466] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981628] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981808] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981968] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.version = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982184] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.connection_uri = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982352] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_mode = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982530] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_model_extra_flags = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982691] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_models = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982862] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_governor_high = performance {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983042] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_governor_low = powersave {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983212] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_management = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983386] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983555] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.device_detach_attempts = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983716] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.device_detach_timeout = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983881] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.disk_cachemodes = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984063] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.disk_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.enabled_perf_events = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984393] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.file_backed_memory = 0 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.gid_maps = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984714] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.hw_disk_discard = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984868] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.hw_machine_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_ceph_conf = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985215] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985380] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985550] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_store_name = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985716] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_pool = rbd {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985880] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_type = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_volume_group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986207] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_key = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986364] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_partition = -2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_password = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986679] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.iscsi_iface = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.iser_use_multipath = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986993] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_bandwidth = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987164] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987323] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime = 500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987480] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987637] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987792] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_inbound_addr = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987949] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988120] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_permit_post_copy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988282] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_scheme = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988454] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_timeout_action = abort {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988616] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_tunnelled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988772] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988930] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989105] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.max_queues = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989270] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989494] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989660] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.nfs_mount_options = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989966] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990155] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990319] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_iser_scan_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990504] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_memory_encrypted_guests = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990674] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_pcie_ports = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_volume_scan_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991179] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.pmem_namespaces = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991345] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.quobyte_client_cfg = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991667] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991843] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992027] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992357] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_secret_uuid = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992519] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992678] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.remote_filesystem_transport = ssh {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993012] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_image_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_kernel_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993329] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_ramdisk_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rx_queue_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.smbfs_mount_options = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994299] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshot_compression = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994462] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshot_image_format = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994683] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.sparse_logical_volumes = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995015] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995189] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_group = tss {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995353] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_user = tss {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995520] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.sysinfo_serial = unique {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995675] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.tb_cache_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995829] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.tx_queue_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995987] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.uid_maps = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.use_virtio_for_bridges = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.virt_type = kvm {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_clear = zero 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996652] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_clear_size = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996813] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_use_multipath = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996968] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_cache_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997145] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997307] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_group = qemu {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997465] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_opts = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997663] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997988] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998190] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_user = stack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998357] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998536] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999041] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.certfile = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999207] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999363] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999523] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999692] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.default_floating_pool = public {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999853] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000019] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.extension_sync_interval = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000187] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.http_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000350] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000531] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000700] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000881] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001051] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001225] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.ovs_bridge = br-int {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.physnets = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001599] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001765] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001933] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_metadata_proxy = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002275] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_type = network {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002435] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002593] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002905] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003093] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003257] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003428] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.bdms_in_notifications = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003606] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.default_level = INFO {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003779] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.notification_format = unversioned {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003940] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.notify_on_state_change = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004126] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004300] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.alias = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004466] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.device_spec = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004627] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.report_in_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004797] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004964] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005146] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005305] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005461] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005622] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005779] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005935] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.default_domain_id = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006261] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.default_domain_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006418] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006573] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.domain_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006729] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007053] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007215] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007535] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007691] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007855] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_domain_name = Default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008028] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008204] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_name = service {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008535] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008693] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008859] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.service_type = placement {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009028] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009192] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009513] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.system_scope = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009668] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009824] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.trust_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009979] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010165] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_domain_name = Default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010528] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.username = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010715] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010878] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011074] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.cores = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011245] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.count_usage_from_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011619] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_file_content_bytes = 10240 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011785] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_file_path_length = 255 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011948] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_files = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012125] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.instances = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012290] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.key_pairs = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012453] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.metadata_items = 128 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.ram = 51200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012778] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.recheck_quota = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012942] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.server_group_members = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013116] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.server_groups = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013291] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013451] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013613] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.image_metadata_prefilter = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013769] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013926] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.max_attempts = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014097] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.max_placement_results = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014259] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014416] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.query_placement_for_image_type_support = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014574] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014744] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014915] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015273] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015605] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015767] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015926] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016287] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.host_subset_size = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016452] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016610] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016767] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016925] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.isolated_hosts = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.isolated_images = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017255] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017411] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017570] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.pci_in_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017882] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018049] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018208] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018365] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018522] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018830] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.track_instance_changes = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018998] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019178] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.required = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019335] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_multiplier = 1.0 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019650] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_setting = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019956] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020141] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020314] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.port_range = 10000:20000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020508] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020677] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020844] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.serialproxy_port = 6083 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021013] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021363] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021533] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021694] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021849] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.insecure = False {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022007] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022184] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.send_service_user_token = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022345] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022500] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022678] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.agent_enabled = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022836] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023163] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023367] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023539] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_port = 6082 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023696] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.image_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023849] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.jpeg_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.playback_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024176] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.require_secure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024339] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.server_listen = 127.0.0.1 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024511] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024679] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.streaming_mode = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024835] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.zlib_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.baseapi = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025173] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.compute = auto {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025330] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.conductor = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025482] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.scheduler = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025802] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025957] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026123] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026282] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026593] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.keyfile = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026898] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027074] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.api_retry_count = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027397] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.cache_prefix = devstack-image-cache {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027563] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.cluster_name = testcl1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027720] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.connection_pool_size = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027872] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.console_delay_seconds = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028046] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.datastore_regex = ^datastore.* {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028423] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028589] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_port = 443 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_username = administrator@vsphere.local {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028912] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.insecure = True {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029081] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.integration_bridge = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.maximum_objects = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029400] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_default_policy = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029559] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_wsdl_location = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029873] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030037] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_port_proxy_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_port_service_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030381] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.task_poll_interval = 0.5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030555] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.use_linked_clone = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030724] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_keymap = en-us {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_port = 5900 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031059] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_port_total = 10000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031240] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.auth_schemes = ['none'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031418] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031740] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031928] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_port = 6080 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032309] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.server_listen = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032488] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_ca_certs = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032801] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_client_cert = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032953] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_client_key = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033144] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033305] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_deep_image_inspection = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033463] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033621] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033776] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033932] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_rootwrap = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034103] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.enable_numa_live_migration = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034263] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034420] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034578] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034734] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.libvirt_disable_apic = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034890] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035608] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035803] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035959] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036142] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036462] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036627] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.client_socket_timeout = 900 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036786] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.default_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036945] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.keep_alive = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037120] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.max_header_line = 16384 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037281] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.secure_proxy_ssl_header = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037439] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037600] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_cert_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_key_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037908] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.tcp_keepidle = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038090] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038257] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038409] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.cloud_connector_url = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038696] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038866] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.reachable_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039054] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.enforce_new_defaults = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039429] env[63371]: WARNING oslo_config.cfg [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
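Every DEBUG record in this dump is emitted by oslo.config's log_opt_values() helper (the cfg.py:2824 frame cited in each record), which a service calls once at startup to log every registered option group and its effective value. The following is a minimal sketch of that mechanism, assuming oslo.config is installed; the option below is a stand-in mirroring the deprecated [oslo_policy] enforce_scope value shown in the surrounding records, not the real definition from oslo.policy:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF

    # Stand-in option mirroring the [oslo_policy] enforce_scope value seen
    # above; the real option is registered by oslo.policy, not by this code.
    CONF.register_opts(
        [cfg.BoolOpt('enforce_scope',
                     default=True,
                     deprecated_for_removal=True,
                     deprecated_reason='Scope checks will always be enforced.')],
        group='oslo_policy')

    CONF([], project='demo')                  # parse an (empty) command line
    CONF.log_opt_values(LOG, logging.DEBUG)   # one DEBUG line per registered option

Running this prints DEBUG lines of the same "group.option = value" shape as the records above; in Nova the call happens during service launch, which is why the entire configuration appears in one burst at startup.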
[ 507.039618] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.enforce_scope = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039791] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_default_rule = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039966] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040151] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_file = policy.yaml {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040326] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040514] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040836] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041176] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041568] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041754] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.connection_string = messaging:// {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041920] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.enabled = False {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_doc_type = notification {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042266] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_scroll_size = 10000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042429] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_scroll_time = 2m {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042591] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.filter_error_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042756] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.hmac_keys = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042919] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.sentinel_service_name = mymaster {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043092] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.socket_timeout = 0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043254] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.trace_requests = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043408] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.trace_sqlalchemy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043585] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_jaeger.process_tags = {} {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043742] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_jaeger.service_name_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043899] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_otlp.service_name_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044072] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] remote_debug.host = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] remote_debug.port = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044407] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044565] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044722] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044876] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045041] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045201] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045359] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045516] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045673] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045995] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046339] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046503] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046699] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046918] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047331] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047508] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047674] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047837] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047999] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048172] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048336] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048491] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048802] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049165] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049398] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049584] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049800] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050156] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050327] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_version = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050519] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050732] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.retry = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051245] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051445] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.transport_url = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051623] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051786] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051940] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052268] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052424] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052580] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052734] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052898] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_interface = publicURL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053371] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053526] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_service_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053683] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.insecure = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053985] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054151] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054302] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054452] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054606] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054755] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.service_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054909] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055071] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055227] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055377] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055529] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.valid_interfaces = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055892] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.file_event_handler = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056117] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056311] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.log_dir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056485] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056643] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056798] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057135] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057293] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057458] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057612] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057763] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057923] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058108] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058272] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058437] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.flat_interface = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058614] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058784] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058950] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059293] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059455] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059613] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059785] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059952] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.isolate_vif = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060291] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060481] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovsdb_interface = native {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060812] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.per_port_bridge = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060981] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.capabilities = [21] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061154] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061310] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061514] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061688] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061848] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062029] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062196] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062354] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062518] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062677] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062833] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.user = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062963] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ******************************************************************************** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 507.063482] env[63371]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 507.567050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Getting list of instances from cluster (obj){ [ 507.567050] env[63371]: value = "domain-c8" [ 507.567050] env[63371]: _type = "ClusterComputeResource" [ 507.567050] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 507.568097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af89b2da-9a3d-4bcc-81d5-2638fe3d4f23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.577550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 507.578092] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 507.578580] env[63371]: INFO nova.virt.node [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Generated node identity c079ebb1-2fa2-4df9-bdab-118e305653c1 [ 507.578818] env[63371]: INFO nova.virt.node [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Wrote node identity c079ebb1-2fa2-4df9-bdab-118e305653c1 to /opt/stack/data/n-cpu-1/compute_id [ 508.081579] env[63371]: WARNING nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute nodes ['c079ebb1-2fa2-4df9-bdab-118e305653c1'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 509.089091] env[63371]: INFO nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 510.095224] env[63371]: WARNING nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 510.095544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.095661] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.095830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 510.095985] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 510.097268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb483aa-420c-4b1b-ad5a-646c0a9ce0d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.105428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373b3be6-c67e-4e4e-9f91-549470fad113 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.120228] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266fd036-172c-4dd6-a024-7b5961631d30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.126385] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95246de-9643-4b71-8ef7-c3ff78047fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.154247] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 510.154391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.154582] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.656789] env[63371]: WARNING 
nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] No compute node record for cpu-1:c079ebb1-2fa2-4df9-bdab-118e305653c1: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c079ebb1-2fa2-4df9-bdab-118e305653c1 could not be found. [ 511.160380] env[63371]: INFO nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c079ebb1-2fa2-4df9-bdab-118e305653c1 [ 512.668618] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 512.669048] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 512.858297] env[63371]: INFO nova.scheduler.client.report [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] [req-9ec11389-907b-4a4a-885b-3eaeea03e3f4] Created resource provider record via placement API for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 512.876433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721d650d-e8a5-46b0-a768-83dad0a9e485 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.884293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cffddb-b6ee-49fe-83a6-746af8bd3c43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.914177] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df68eb08-2ae0-4f40-bd64-03c1e065bf3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.921652] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9fe4fa-499f-4f58-86f1-1817e42dbb08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.935784] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.468379] env[63371]: DEBUG nova.scheduler.client.report [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 513.468642] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 0 to 1 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 513.468786] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.515905] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 1 to 2 during operation: update_traits {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 514.020993] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 514.021378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.867s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.021378] env[63371]: DEBUG nova.service [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Creating RPC server for service compute {{(pid=63371) start /opt/stack/nova/nova/service.py:186}} [ 514.035714] env[63371]: DEBUG nova.service [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Join ServiceGroup membership for this service compute {{(pid=63371) start /opt/stack/nova/nova/service.py:203}} [ 514.035911] env[63371]: DEBUG nova.servicegroup.drivers.db [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63371) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 569.037565] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.038249] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.038463] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 569.038601] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 569.541517] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.045770] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 570.045770] env[63371]: value = "domain-c8" [ 570.045770] env[63371]: _type = "ClusterComputeResource" [ 570.045770] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 570.049249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396fa62f-0da9-4738-830d-c38ee4938ebb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.056765] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 570.057168] env[63371]: DEBUG oslo_service.periodic_task [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.057521] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 570.057819] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.561091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.561343] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.561542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.561701] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 570.562612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977ea8bb-be55-47b2-bcf4-40de692aeb80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.570468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba63a4c-102b-40c2-adf3-790f2473170f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.583727] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b75cb6-31bf-4991-9ae3-90e8c1485897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.589711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17529120-0de0-4ed7-b0dd-b0a6b53cc994 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.617918] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181393MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 570.618063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.618236] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.635870] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 571.636104] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 571.649720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a849c4-0491-4cf6-81fd-e3bc2db328e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.657192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479da5b2-ac36-4322-9609-af3f717a58e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.685485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475a21e-4319-40ea-a352-07b9c9e836ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.692257] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581fc180-a485-48eb-9da9-440fc98d2393 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.704685] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.207699] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 572.713021] env[63371]: DEBUG nova.compute.resource_tracker [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 572.713427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.095s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.713472] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 572.713802] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 572.713802] env[63371]: value = "domain-c8" [ 572.713802] env[63371]: _type = "ClusterComputeResource" [ 572.713802] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 572.714821] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fd51fb-e060-43b0-a44e-d4275e347444 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.723307] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 626.109852] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.110289] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.615018] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.615254] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 626.615322] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 627.119720] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 627.120168] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120168] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120286] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120440] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120581] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120718] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120842] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 627.120974] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.623958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.624221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.624387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.624537] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 627.625473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b62e17-6dc3-478a-b856-05342af3223f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.633805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae17e9c3-d0f8-4685-95e7-42ad761b42ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.648253] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4c2a10-030a-4353-8851-19e6d8184ff7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.655208] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3759736b-abd7-48c2-962c-32d714024afa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.684121] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 627.684341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.684506] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.702333] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 628.702609] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 628.715409] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a94e0aa-af5c-45e6-9865-ece5c4345bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.723142] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2a2da8-b29e-41c7-bec8-0458ad569b72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.751586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244bb3ee-7e69-4dde-911c-21a269342c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.758212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b76d4d3-de86-426a-9b82-a298687fbabb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.770611] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.273517] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 629.274785] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 629.274949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.276805] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.277216] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.277404] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 689.277593] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 689.781284] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 689.781533] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781672] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781818] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781960] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782116] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782251] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782374] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 689.782510] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.285610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.285984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.286047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.286179] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 690.287098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49dce01-36b6-4233-9c53-6926eb8a9dc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.295310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335ae530-c587-4fc7-b735-9a175f5f201e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.308865] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d2aef-66ff-417d-8e4c-606a6f350a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.314971] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de973a0-bd82-43ca-8453-a62bd5bce478 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.342833] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 690.342971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.343167] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.361739] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 691.362094] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 691.374890] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c8ea87-3e6a-45d0-8c1f-c8c27354da13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.382224] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560172fa-baf7-4ed0-b96b-3c6eba57a76e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.411969] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c04e1a7-9b95-4001-ad4d-20246384d009 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.418736] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd4cc1a-1ef8-4222-ac30-9cd49aa373ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.431214] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.934407] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 691.935705] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 691.935891] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.084276] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.084768] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.591723] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.591723] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 750.591723] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 751.095356] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 751.095613] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.095782] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.095927] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096087] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096231] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096371] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096498] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 751.096632] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.604163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.604430] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.604612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.604767] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 751.605680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5170234d-57c6-4ef3-a51f-9753a57b75d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.614860] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825dc460-b75c-40b9-bbc2-a4297cda94bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.628362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51af56c-c7f9-4e2f-8356-ef716938e171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.634444] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f969127-b21d-475d-8ec8-7b0b58f20bc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.662114] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 751.662253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.662427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.680528] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 752.680528] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 752.695816] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65769b39-9be4-4282-beae-8f12dc7ba23d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.703796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b07061-2e51-442d-bb95-3603306ccb92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.733628] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef11c67-693a-4033-8a93-8c74c74ee560 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.740569] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db0950e-d7a2-48d6-b4b7-39ea94f955b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.753202] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.256887] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.258187] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 753.258365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.430573] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.430985] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 802.934558] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 0 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 802.934854] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.934948] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 803.437298] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.940628] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.941211] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 806.941211] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 807.444358] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
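The recurring Acquiring lock "compute_resources" / acquired ... waited / "released" ... held triplets above are emitted by oslo.concurrency's lockutils wrapper (the inner function logged at lockutils.py:402/407/421) each time the resource tracker serializes a critical section. Below is a minimal sketch of that pattern; ExampleTracker and its method are illustrative stand-ins, not Nova's actual ResourceTracker code.

```python
# Minimal sketch of the oslo.concurrency pattern behind the
# 'Acquiring lock ... / acquired ... waited / "released" ... held' DEBUG
# lines above. ExampleTracker is illustrative, not Nova's ResourceTracker.
from oslo_concurrency import lockutils


class ExampleTracker(object):
    @lockutils.synchronized('compute_resources')
    def update_available_resource(self):
        # Runs with the "compute_resources" semaphore held; concurrent
        # callers block, and the wrapper logs how long each caller waited
        # and how long the lock was held.
        pass


# The same lock can also be taken explicitly as a context manager.
with lockutils.lock('compute_resources'):
    pass
```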
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 807.444612] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.444778] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.444941] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445102] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445245] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445369] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 807.445505] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.949162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.949609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.949609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.949821] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 807.950695] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924d7f0b-15a7-4ae6-9ff9-8b0c77fffad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.958929] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd26996-2932-43f4-99bf-27c56a2149c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.972693] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25af5030-3ff3-4d4b-ba95-e20fe49e9da6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.978827] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe5da37-4b2c-4e82-a09b-9f6ee675102c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.007100] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 808.007278] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.007425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.025088] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 809.025349] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 809.037680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6933970-6211-4a94-be3d-ddac8e0660c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.045234] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c13972a-6a7d-4edd-bd3e-e54e0d2dbbc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.074234] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d121c365-dec6-4136-bc19-ad214e145a21 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.081099] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abbf9c2-b397-4cec-8ed8-689b237e874e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.093568] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.596116] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.597367] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 809.597544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.583578] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.583875] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.430476] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.430838] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.430983] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.431363] env[63371]: DEBUG nova.compute.manager [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 868.431363] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 868.934210] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 868.934437] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934593] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934744] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934872] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 868.935034] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.438622] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.439020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.439067] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.439196] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 869.440096] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ec18a1-8072-4f1b-95f2-aebc689d0333 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.448212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922c40a3-ac07-41fe-a004-74fa51248e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.462612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62328ba-55c6-4db2-a931-ac644fec731b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.468732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a50d9c-65ce-4837-b9b2-a8733b2681dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.496333] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181394MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 869.496482] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.496673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.528296] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 870.528545] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 870.544214] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 870.558041] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 870.558220] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.571875] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 870.587247] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 870.597771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c384e6-b475-46d1-8959-728ccea5f628 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.605068] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5743c0d-c1b8-45d7-aa8f-152a68e30bb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.634158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f39005-911c-4254-918f-075380a21396 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.640920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03703ed3-a233-4294-a851-38ed3e5eeaa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.653311] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.156092] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.157406] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.157590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.661s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.654322] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.654671] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.425479] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.430858] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431290] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431290] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431449] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431528] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
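The inventory dictionary reported for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 above maps onto schedulable capacity roughly as (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation. The helper below is an illustrative sketch using this log's values, not Placement's actual implementation.

```python
# Illustrative sketch (not Placement code): effective capacity per resource
# class derived from the inventory reported in the log above, using
# (total - reserved) * allocation_ratio; max_unit caps a single allocation.
def effective_capacity(total, reserved, allocation_ratio):
    return (total - reserved) * allocation_ratio


inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 166},
}

for rc, inv in inventory.items():
    cap = effective_capacity(inv['total'], inv['reserved'], inv['allocation_ratio'])
    print(f"{rc}: capacity={cap:g}, max single allocation={inv['max_unit']}")
# VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400
```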
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 928.431660] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.935013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.935269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.935433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.935588] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 928.936518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3baeb01b-9590-4da1-a015-12b03c24ba53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.944707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bb86aa-acb4-4a5b-baba-344782b58932 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.958385] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f6cf7d-1bab-4541-8b0f-990f1023cc86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.964320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1836a258-7a3e-4b69-8b11-32c63e87c7a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.993015] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 928.993153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.993324] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.010688] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 930.010924] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 930.023517] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19037273-33c8-4239-b80f-da0ffa568d50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.030787] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06db7879-8916-4c52-9372-5e531a6cf418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.059670] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da58ecc-9833-45c5-9c7b-c60e8b92e842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.066510] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1c9cac-3cae-48be-bfd7-41efbfacb54b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.081121] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.583913] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.585196] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 930.585374] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.579486] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.579847] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.579847] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 932.579966] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 933.082686] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 933.082919] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.083113] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.430681] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.431148] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.431148] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 990.431283] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 990.934459] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 990.934706] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.934889] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935062] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935208] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935351] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935478] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 990.935615] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.438811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.439178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.439228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.439364] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 991.440265] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56b9245-f453-4413-8feb-ba4fcb6c30f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.450137] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294af45a-1ece-4c2a-91c7-096055997c1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.464380] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b88b03-6fd3-487a-bc1f-72fb0ec65510 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.470789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a725c1fe-57df-40dd-89a9-56fedd91356f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.499356] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181395MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 991.499494] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.499674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.518038] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 992.518289] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 992.531757] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b1b56-e349-4605-8596-49d8a3143fe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.539342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bc2ab4-9743-4304-b0d0-f32e2ba26fc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.568424] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67d8580-5ae0-4d1e-9268-4f91b3a180cb {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.575712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a345437-4330-4589-aff9-2e78cf9579b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.588237] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.091708] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.093079] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 993.093260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.588927] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.094263] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.431519] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.934978] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.935240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
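The Running periodic task ComputeManager.* lines above come from oslo.service's periodic task machinery (run_periodic_tasks at periodic_task.py:210): methods decorated with @periodic_task.periodic_task on a PeriodicTasks subclass are collected at class definition time and invoked on a timer. A minimal self-contained sketch follows, with ExampleManager and its task bodies as illustrative placeholders rather than Nova's ComputeManager.

```python
# Minimal sketch of the oslo.service periodic-task pattern that produces the
# 'Running periodic task ...' DEBUG lines above. ExampleManager and its task
# bodies are illustrative placeholders, not Nova's ComputeManager.
from oslo_config import cfg
from oslo_service import periodic_task


class ExampleManager(periodic_task.PeriodicTasks):
    def __init__(self, conf):
        super().__init__(conf)

    @periodic_task.periodic_task
    def _heal_instance_info_cache(self, context):
        # Collected by the PeriodicTasks metaclass; runs at the default spacing.
        pass

    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        # Runs at most once every 60 seconds.
        pass


conf = cfg.ConfigOpts()
manager = ExampleManager(conf)
# A service loop calls this repeatedly; each due task is logged and invoked.
manager.run_periodic_tasks(context=None)
```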
1050.935403] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.935557] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1050.936505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ba99bb-570b-44e6-8495-beceb02ac914 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.945141] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0195fdc3-3e40-4abb-81ba-afe72624739b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.959012] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb5f335-3fdb-4cce-8c5a-a2670c1740e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.965020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1afb7-14bf-4ba0-956f-ee57fedb31fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.993527] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1050.993670] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.993897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.013019] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1052.013019] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1052.023833] env[63371]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13202d6-245f-4fcd-9322-444cb36cc418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.031679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51359e06-6364-4f4d-8406-8d201ea7d147 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.060405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3bcaba-b682-4f3a-a7c1-d2beb35c7de3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.066964] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06664c6-9042-4722-ae3d-74619930864c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.079347] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.583098] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.584356] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1052.584538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.584899] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.585172] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.585326] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1053.585443] env[63371]: DEBUG nova.compute.manager [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1054.088465] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1054.088700] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.088834] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.088989] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089178] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089312] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089460] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089585] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1104.432585] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.432585] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1104.936645] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 0 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1104.936645] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.936645] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1112.438268] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438647] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438647] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438800] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438956] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1112.439163] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.945569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.945810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.945973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.946139] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1112.947110] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67b6ff4-633b-4926-a6b5-99d2f3dc4097 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.955339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68425a3-b5d7-4300-9f49-281547992258 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.969332] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31148ec9-0578-472a-bae7-8825c850af7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.975624] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a389e4-5f84-45fb-b1d0-9748518d1c99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.003668] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181384MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1113.003807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1113.003986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.025606] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1114.025897] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1114.039983] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aedad39-d930-4991-93a8-b8ba5f656e9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.047434] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d212bf-facf-4020-aaec-2459ed3da6c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.076145] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9c73cd-f0db-4df4-a69a-a6874d6a9c9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.086047] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de960e03-2b23-4020-9f55-ee0f04ac4fe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.098674] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.601385] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1114.602668] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1114.602840] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.599s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.431306] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.936680] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.936887] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1115.936977] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1116.439623] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1116.439973] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440120] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440337] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440513] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.046641] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.549818] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 1170.549818] env[63371]: value = "domain-c8" [ 1170.549818] env[63371]: _type = "ClusterComputeResource" [ 1170.549818] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1170.550955] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e632ddb-a60b-462a-a939-e1735b3942a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.560123] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1171.430611] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.431069] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.934135] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.934421] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.934563] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.934714] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1171.935646] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3a7835-5904-42e3-8fb4-fc5e9465e1eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.944073] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d5d0e3-a4aa-4136-af1a-0bda24fe2e76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.957930] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e0b97-14ee-40d5-a7d4-f12c52802f77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.964236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871d8078-0984-46bb-8039-e7fc0f1f5793 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.992515] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181375MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1171.992641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.992832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.107421] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1173.107675] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1173.122608] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1173.133620] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1173.133831] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1173.143030] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1173.156542] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1173.167238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57debed-f04d-4def-9536-10e657cd14a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.174431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a8aaa9-a01c-4fb0-92aa-c573dcdf9a61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.202766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e85857-8636-4e4e-95b8-626d021ab6fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.209352] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a8d93f-8f06-4507-8f0f-90c999d25dd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.221763] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.725056] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.726241] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1173.726483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.721505] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.721894] env[63371]: DEBUG 
oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.721987] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722139] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722278] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722418] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722605] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1175.431826] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.431990] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1175.432108] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1175.935532] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1231.431670] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.934925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.935192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.935339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.935487] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1231.936388] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d890075-a8b8-4c0c-a5e4-5c685cd76900 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.944626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b951bf-3405-4094-95ce-04c630323372 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.958680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e441ed8-b2d9-4678-a2d0-e025a2cc924a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.964771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6046214b-b175-408f-a9f1-a6a87258f4f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.992274] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1231.992428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1231.992594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.011893] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1233.012227] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1233.025720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e127875-3761-4ed1-8ce4-7e26583114df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.033272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c0f8f0-b2d8-4425-a624-e92f63ec5a35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.062198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7749c540-0c64-4a76-90b4-c1a480ac3ee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.069466] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd68e68a-dce8-4591-bc36-8ea7aa02799d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.082190] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.585152] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1233.586525] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1233.586706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.580298] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.580582] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085530] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085728] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085844] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085987] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086151] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086304] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086437] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1237.432361] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.432824] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1237.432824] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1237.935844] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1292.431159] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.934566] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.935401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.935705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.935914] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1292.937512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adac518-ecc5-4651-9b34-f9918cddecd9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.954018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99effb12-ffed-4082-80c4-0e48c97fe12f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.968854] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ef04d-5bdd-4786-9d66-00377974031d {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.976164] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d81835b-34ab-456d-a070-f42c4da89e91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.009731] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1293.010095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.010479] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.672185] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cffe6a79-ad7e-4488-b179-608a03c978aa has allocations against this compute host but is not found in the database. [ 1294.672185] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1294.672185] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1294.672185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687fc318-142e-4bd0-ae65-46b1f2e8a3ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.672185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d61006-4f1f-43bc-af19-b33b09eaba22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.677730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8996c4b-df67-450a-8215-192dd77748c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.684656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c022c4-58c0-4a2e-81e2-2b319b40384a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.699136] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not 
changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.819295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.819948] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.203417] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1295.206021] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1295.206021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.323950] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1295.870612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.870612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.871980] env[63371]: INFO nova.compute.claims [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1296.443863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.444462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.952206] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1296.971822] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e99733-1acf-4a72-9fca-7091b3abd89b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.981223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0efe60e-4f67-4594-8832-faf9b32cb16d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.016677] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd0ae29-60bc-4474-8843-03f9004a628f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.026055] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8106bd03-0f21-4383-87c7-458470b88675 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.049091] env[63371]: DEBUG nova.compute.provider_tree [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206859] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207135] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207246] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207405] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1297.225527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.225733] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.436860] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.436860] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1297.436860] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1297.484650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.553245] env[63371]: DEBUG nova.scheduler.client.report [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1297.729350] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1297.938724] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1297.938724] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1297.965880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.965880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.062319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.062319] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1298.067229] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.583s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.069799] env[63371]: INFO nova.compute.claims [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.267298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.330114] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.330114] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.380561] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.381134] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.472936] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1298.580043] env[63371]: DEBUG nova.compute.utils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1298.580043] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1298.580043] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1298.833437] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1298.884428] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1299.002754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.089224] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1299.212953] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79871264-7bf8-41d3-bd51-ace578111b87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.224501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825eaf91-3579-47c6-bb35-3b92379e6896 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.259287] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854ff6a8-5432-49b3-ad93-9c9061f8454e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.276549] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41979288-9832-4b26-b927-5c87a75d0f40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.289533] env[63371]: DEBUG nova.compute.provider_tree [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.364099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.428721] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.731054] env[63371]: DEBUG nova.policy [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b3af3bbd35846198784331994497179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '857815a7f15648948bb4ca862473ed06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1299.791651] env[63371]: DEBUG nova.scheduler.client.report [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1300.110964] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1300.154862] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1300.154990] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1300.155162] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware 
[None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1300.159409] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1300.162241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b724217c-cb28-4a26-a32e-77439c1aff3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.173022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01391cf-c75b-43da-a9f3-f1f55ba21121 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.193806] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e807a9-d5b0-487b-a48f-f8942fec8c32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.297684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.298306] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1300.301015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.034s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.302621] env[63371]: INFO nova.compute.claims [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.807386] env[63371]: DEBUG nova.compute.utils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1300.809589] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1300.809589] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1300.859352] env[63371]: DEBUG nova.policy [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62266d81b3724a98b80b05cbb08227fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7e27f48936d4019bd23bc30cd94f85b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1301.025407] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Successfully created port: d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.180482] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Successfully created port: 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.287356] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.287595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.316091] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1301.459341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dea60c-a0ac-4e8f-bd08-96a891315ae4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.468314] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b791cc5f-1792-4fb0-b9bc-61cd925336b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.504249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9478e567-0267-4555-bf78-b5ad61a19e29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.513202] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90322a27-050a-48bf-9883-e4216641f101 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.528812] env[63371]: DEBUG nova.compute.provider_tree [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.789594] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.033905] env[63371]: DEBUG nova.scheduler.client.report [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1302.141408] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.141645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.332175] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1302.335856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.369100] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1302.369374] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1302.369526] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.369782] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1302.370765] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.370765] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1302.370956] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 
tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1302.371191] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1302.371339] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1302.371519] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1302.371689] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1302.372717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3aaa6d-c3e0-4637-8b9f-30651f4084a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.382548] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad15e87-5fb1-4805-ba7d-1a161db3a877 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.544526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.546162] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1302.550852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.548s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.552436] env[63371]: INFO nova.compute.claims [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1302.644628] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.695130] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.695255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.065420] env[63371]: DEBUG nova.compute.utils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1303.068368] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1303.068368] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1303.184424] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.199010] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1303.213759] env[63371]: DEBUG nova.policy [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0d15de96e1a4ed994bbb2226d7a3da1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2d459d8cd874202a489beb816804cc8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1303.572191] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1303.789844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.845840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a59e33-0afa-4ba0-ac7f-37bba63c8086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.853350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32db03ab-3e6d-4ec2-a264-75a1dfb6a315 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.883867] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c2810e-3868-4d10-b241-6fcbdaed85e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.891473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4deae797-8209-414d-a037-1331f44b5110 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.904490] env[63371]: DEBUG nova.compute.provider_tree [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.198355] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Successfully updated port: d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.218321] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.219157] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.357249] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 
3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Successfully created port: 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1304.407563] env[63371]: DEBUG nova.scheduler.client.report [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1304.586833] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1304.624561] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Image pref 0:0:0 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.625600] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1304.626700] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1304.626954] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1304.627159] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1304.627333] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1304.627506] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1304.628419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5723447f-d07f-4f98-923a-681b0f3198a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.638485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1c83e3-34d1-4ee0-8337-304b283e033a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.702420] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.702530] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.702753] env[63371]: DEBUG 
nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1304.724633] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1304.915296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.915820] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1304.922025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.559s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.922025] env[63371]: INFO nova.compute.claims [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1305.249397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.290946] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.310269] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Successfully updated port: 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.369699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.369980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.422533] env[63371]: DEBUG nova.compute.utils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1305.427109] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1305.427109] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1305.617681] env[63371]: DEBUG nova.policy [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '892edb1b01bf4c4b84dbddd3baeb7761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdeddf941ce24613ad019cc1202a294b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1305.675078] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.816312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.816312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.816312] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.928722] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1306.152371] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f4d298-e387-45e9-a65b-e5bf1b408f93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.161539] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2657a05-c618-4a81-b6f0-63c16874a4f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.195045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.196046] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance network_info: |[{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1306.196158] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 
cffe6a79-ad7e-4488-b179-608a03c978aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:8a:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2c4ae08-b10f-4881-8089-d2c46693937b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.208523] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.209518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306b2e5-9eee-414f-8d4c-108723a2a5e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.215047] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47bb2c74-bbd8-4435-9f88-3cc2b1725ccf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.233667] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276fc9d3-521b-429c-9a3e-18b45a8ed058 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.253025] env[63371]: DEBUG nova.compute.provider_tree [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.255962] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: OpenStack in parent group-v4. [ 1306.255962] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: Project (857815a7f15648948bb4ca862473ed06). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.255962] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc09978b-a6b7-4828-b372-7e62abcbb9d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.269710] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: Project (857815a7f15648948bb4ca862473ed06) in parent group-v368199. [ 1306.269930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: Instances. Parent ref: group-v368200. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.270197] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64cf9386-d948-4233-b623-ebe46acf2abd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.280612] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: Instances in parent group-v368200. [ 1306.280880] env[63371]: DEBUG oslo.service.loopingcall [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.281524] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.281565] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daf5ee01-89ce-4c1e-80fd-8ca2b9ea4f52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.302579] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.302579] env[63371]: value = "task-1773470" [ 1306.302579] env[63371]: _type = "Task" [ 1306.302579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.314015] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773470, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.335164] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Successfully created port: eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.521493] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1306.760089] env[63371]: DEBUG nova.scheduler.client.report [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1306.812337] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.820799] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773470, 'name': CreateVM_Task, 'duration_secs': 0.376152} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.820969] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.832506] env[63371]: DEBUG oslo_vmware.service [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97d095a-951b-4197-b750-6b9a6134d5d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.841227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.841463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.843239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1306.844056] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6896886d-842c-46a8-be2d-a48a9a962858 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.851609] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1306.851609] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52acae75-ba31-2238-c0fb-2fb0211af0c0" [ 1306.851609] env[63371]: _type = "Task" [ 1306.851609] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.860831] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52acae75-ba31-2238-c0fb-2fb0211af0c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.950940] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1306.983116] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1306.983377] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1306.983724] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.983724] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1306.983898] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.983957] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1306.984731] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1306.984918] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1306.985119] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1306.985290] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1306.985464] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1306.986347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44078da-8778-43ea-bcf9-3ce7780d5c08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.995384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5c3bb-f80c-426b-823d-5c08beaafc3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.133997] env[63371]: DEBUG nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.133997] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.134178] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.134247] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.134409] env[63371]: DEBUG nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] No waiting events found dispatching network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1307.134568] env[63371]: WARNING nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received unexpected event network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b for instance with vm_state building and task_state spawning. [ 1307.264183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.264183] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1307.267766] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.839s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.271018] env[63371]: INFO nova.compute.claims [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.316610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.316718] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance network_info: |[{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1307.317222] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:9a:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85189d02-f613-4d29-a47a-b7c1ce74c9f3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1307.325787] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating folder: Project (a7e27f48936d4019bd23bc30cd94f85b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1307.326752] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8673b809-7e97-43cb-85e2-a7735d31dcf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.337072] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created folder: Project (a7e27f48936d4019bd23bc30cd94f85b) in parent group-v368199. [ 1307.337396] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating folder: Instances. Parent ref: group-v368203. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1307.337642] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0e02fc7-5633-424e-91a1-f74813dda5be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.348441] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created folder: Instances in parent group-v368203. 
[ 1307.348676] env[63371]: DEBUG oslo.service.loopingcall [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.348872] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1307.349098] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-871d21aa-2280-41a6-a030-b5b82440dac4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.382358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.382782] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.383112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.383263] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.384045] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.385482] env[63371]: DEBUG nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.386450] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.386680] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.386868] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.387091] env[63371]: DEBUG nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] No waiting events found dispatching network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1307.387379] env[63371]: WARNING nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received unexpected event network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 for instance with vm_state building and task_state spawning. [ 1307.388257] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1307.388257] env[63371]: value = "task-1773473" [ 1307.388257] env[63371]: _type = "Task" [ 1307.388257] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.388585] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a372fb0b-23b7-41d8-937d-0491f2181f43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.404069] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773473, 'name': CreateVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.413209] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.413209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.414265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e327341-7e15-48f4-8322-6508b444d253 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.423624] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7946d777-3f9c-456e-8329-553aa5a94c4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.428730] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1307.428730] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523fb129-fbf5-72c6-12d6-d2a7aa52d288" [ 1307.428730] env[63371]: _type = "Task" [ 1307.428730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.438078] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523fb129-fbf5-72c6-12d6-d2a7aa52d288, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.529981] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Successfully updated port: 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.782649] env[63371]: DEBUG nova.compute.utils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.786647] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1307.786647] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.876046] env[63371]: DEBUG nova.policy [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1307.904225] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773473, 'name': CreateVM_Task, 'duration_secs': 0.335532} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.904225] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.904225] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.908021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.908021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1307.908021] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511d2f5b-9517-4f75-8664-b9029aa5e768 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.910876] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1307.910876] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]52be06f6-b316-7523-b62c-f54c466677e1" [ 1307.910876] env[63371]: _type = "Task" [ 1307.910876] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.919422] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be06f6-b316-7523-b62c-f54c466677e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.944477] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1307.944614] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.944993] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34acdc5d-6d23-48fb-9b50-8755008c07fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.973382] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.973657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Fetch image to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1307.973892] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloading image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk on the data store datastore1 {{(pid=63371) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1307.974771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d817d70a-7fda-48f1-810d-bf49ed5ccda6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.986843] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046e5b88-b4b2-4517-9a50-61a79debf7a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.997679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7006132-b080-4421-9435-bd8455aa66c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.032153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ceb6274-5437-4287-9b32-1595f9e76dce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.035263] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.035395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.035532] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1308.041358] env[63371]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c09b0297-bf4c-45f6-b0ac-9f25c4c319b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.069509] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloading image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to the data store datastore1 {{(pid=63371) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1308.138808] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1308.291634] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1308.383503] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Successfully created port: 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.434931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.434931] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1308.435253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.556874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484da872-29ab-4493-b92d-878fe73bb311 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.575872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e699a7d-f7a0-4761-87f9-e7047f7c99f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.623804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9fc495-90d6-46af-ab05-8d1ff488b558 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.638238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a1960a-f877-40f8-8136-0402da721b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.644617] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1308.659092] env[63371]: DEBUG nova.compute.provider_tree [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.918035] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1308.918328] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1308.979964] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloaded image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk on the data store datastore1 {{(pid=63371) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1308.982146] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1308.982402] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.982681] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70366f22-f381-40ca-83f0-410cfb795750 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.994345] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1308.994345] env[63371]: value = "task-1773474" [ 1308.994345] env[63371]: _type = "Task" [ 1308.994345] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.005417] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.047134] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.057966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.058148] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.144632] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Successfully updated port: eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.165531] env[63371]: DEBUG nova.scheduler.client.report [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff 
tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1309.308924] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1309.391439] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613b43b6-d79a-431b-9aeb-2200b2d6e4c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.406552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c7223f-e756-41e8-9495-230d02885cd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.509460] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.549295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.549633] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance network_info: |[{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1309.550197] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:e7:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8905eb18-7130-4195-b35c-38e03dd31b91', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.557718] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating folder: Project (a2d459d8cd874202a489beb816804cc8). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.558050] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6c67633-26c7-4729-a0d1-899b17406a37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.570363] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created folder: Project (a2d459d8cd874202a489beb816804cc8) in parent group-v368199. 
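The entries above trace the pattern this run repeats for every guest: Nova serializes work on the shared image cache with a named oslo.concurrency lock, issues a vCenter task (MakeDirectory, CopyVirtualDisk_Task, CreateVM_Task, and so on), and then polls the task until it reports completion. Below is a minimal Python sketch of that lock-then-poll flow, not the actual Nova or oslo.vmware implementation; lockutils.lock is the real oslo.concurrency context manager, while the task and progress callables (fetch_fn, copy_fn, progress_fn, done_fn) are hypothetical stand-ins for the vSphere calls.

    import time

    from oslo_concurrency import lockutils  # real: named-lock context manager

    POLL_INTERVAL = 0.5  # seconds; in Nova the task poll interval is configurable


    def wait_for_task(task, progress_fn, done_fn):
        # Poll `task` until done_fn(task) is true, reporting progress_fn(task)
        # each round, mirroring the "progress is 0%" / "progress is 100%"
        # lines above. progress_fn and done_fn are hypothetical stand-ins for
        # reading TaskInfo from the vSphere API.
        while not done_fn(task):
            print("Task %r progress is %d%%" % (task, progress_fn(task)))
            time.sleep(POLL_INTERVAL)
        return task


    def ensure_cached_image(cache_vmdk_path, fetch_fn, copy_fn, progress_fn, done_fn):
        # Guard the image-cache conversion with a named lock, like the
        # 'Acquiring lock "[datastore1] devstack-image-cache_base/..."' entries,
        # then run the two tasks the log shows: fetch tmp-sparse.vmdk and copy
        # it to the flat VMDK. All callables here are hypothetical.
        with lockutils.lock(cache_vmdk_path):
            wait_for_task(fetch_fn(), progress_fn, done_fn)   # download image data
            wait_for_task(copy_fn(), progress_fn, done_fn)    # CopyVirtualDisk_Task

In the driver itself the waiting is handled by oslo.vmware's VMwareAPISession.wait_for_task (the api.py:397 and api.py:434 frames quoted in the log), which also covers error states and retries; the sketch only reproduces the acquire-lock / issue-task / poll-progress shape visible in these entries.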
[ 1309.570363] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating folder: Instances. Parent ref: group-v368206. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.570604] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12ddf7b6-e327-466d-9e0f-5f692a4960c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.581045] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created folder: Instances in parent group-v368206. [ 1309.581045] env[63371]: DEBUG oslo.service.loopingcall [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.581222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.581427] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16e18c16-a8a0-4434-bf3b-82758ffd39e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.600644] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.600644] env[63371]: value = "task-1773477" [ 1309.600644] env[63371]: _type = "Task" [ 1309.600644] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.609314] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773477, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.649525] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.650436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.650658] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.672283] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.672950] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1309.675843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.340s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.677747] env[63371]: INFO nova.compute.claims [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1310.005011] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656695} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.005502] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1310.005694] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1310.005952] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cef5904-7663-44e4-8db4-f544154038f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.012636] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1310.012636] env[63371]: value = "task-1773478" [ 1310.012636] env[63371]: _type = "Task" [ 1310.012636] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.020898] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773478, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.057280] env[63371]: DEBUG nova.compute.manager [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-changed-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1310.057280] env[63371]: DEBUG nova.compute.manager [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Refreshing instance network info cache due to event network-changed-d2c4ae08-b10f-4881-8089-d2c46693937b. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1310.057280] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Acquiring lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.057280] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Acquired lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.057280] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Refreshing network info cache for port d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.112951] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773477, 'name': CreateVM_Task, 'duration_secs': 0.353244} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.113176] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1310.114216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.115315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.115315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1310.119145] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c52329-078c-47eb-8d02-de135ee65c8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.125689] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1310.125689] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a911f5-8265-1c72-5c78-09b6a38270a3" [ 1310.125689] env[63371]: _type = "Task" [ 1310.125689] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.135409] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a911f5-8265-1c72-5c78-09b6a38270a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.183186] env[63371]: DEBUG nova.compute.utils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1310.191054] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1310.191250] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1310.198345] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1310.198345] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1310.198345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.198345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.198345] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.231496] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.303078] env[63371]: DEBUG nova.policy [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1310.525177] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024217} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.525619] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1310.525619] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Moving file from [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9. 
{{(pid=63371) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1310.526142] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-70049fee-71ed-46eb-9f83-86ccf0081b76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.533437] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1310.533437] env[63371]: value = "task-1773479" [ 1310.533437] env[63371]: _type = "Task" [ 1310.533437] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.543791] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773479, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.638024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.638024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.638024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.691492] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1310.922209] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.966133] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4c7082-c416-44dc-9930-eb84d84166ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.974527] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5586f96-e1f0-4352-b151-d2077f2bcedc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.014710] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633a9118-cf08-4a61-b32f-7aebe5ebd0b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.023180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d55bbe7-027f-436f-ab28-2a8cfd193dfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.038223] env[63371]: DEBUG nova.compute.provider_tree [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.047683] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773479, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026407} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.047923] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] File moved {{(pid=63371) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1311.048149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Cleaning up location [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1311.048992] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1311.048992] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e036020e-0f80-4803-8ce6-bc322ebd2e72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.057586] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.057586] env[63371]: value = "task-1773480" [ 1311.057586] env[63371]: _type = "Task" [ 1311.057586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.072292] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.233629] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Successfully updated port: 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.266243] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Successfully created port: 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1311.412200] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updated VIF entry in instance network info cache for port d2c4ae08-b10f-4881-8089-d2c46693937b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.412560] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.429021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.429382] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance network_info: |[{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1311.430509] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:62:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb0a9632-9bb3-4855-8ad5-af6c7a628900', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1311.440017] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating folder: Project (cdeddf941ce24613ad019cc1202a294b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1311.440017] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e10756c-9f64-4276-91f8-6ab691b5e716 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.450901] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created folder: Project (cdeddf941ce24613ad019cc1202a294b) in parent group-v368199. [ 1311.451224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating folder: Instances. Parent ref: group-v368209. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1311.451483] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42b7f1e7-d34f-4e51-bcfb-7abb37dd6da8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.463326] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created folder: Instances in parent group-v368209. [ 1311.463804] env[63371]: DEBUG oslo.service.loopingcall [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1311.464122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1311.464482] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b9611e7-2c7f-4c6c-921f-28051d4cf338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.486846] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1311.486846] env[63371]: value = "task-1773483" [ 1311.486846] env[63371]: _type = "Task" [ 1311.486846] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.495478] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773483, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.545419] env[63371]: DEBUG nova.scheduler.client.report [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1311.573270] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024329} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.574270] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1311.575649] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3710362c-e0a8-40d9-8304-3cd08c64b34e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.582369] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.582369] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5209196a-47df-d5a9-cfb2-738f8ec4ea13" [ 1311.582369] env[63371]: _type = "Task" [ 1311.582369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.596101] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5209196a-47df-d5a9-cfb2-738f8ec4ea13, 'name': SearchDatastore_Task, 'duration_secs': 0.008508} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.596101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.596101] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1311.596417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.596682] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.596950] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1a9d429-c84b-42f9-9dd4-46f72456ed29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.599095] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94c6c62f-2d15-4933-8695-1d561e4a23de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.606119] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.606119] env[63371]: value = "task-1773484" [ 1311.606119] env[63371]: _type = "Task" [ 1311.606119] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.610451] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.610719] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.612184] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8307bbcf-20fc-4e32-a4f7-af5e44f9de3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.617814] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.620854] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1311.620854] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de" [ 1311.620854] env[63371]: _type = "Task" [ 1311.620854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.628741] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.696610] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.697077] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.705711] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1311.741213] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1311.741505] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1311.742550] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.742776] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1311.742943] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.743110] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1311.743343] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1311.743482] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1311.743684] env[63371]: DEBUG nova.virt.hardware [None 
req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1311.743848] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1311.744054] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1311.744657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.744788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.744932] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.747308] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6886e1b5-967b-45e2-826a-81777497c2fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.756919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.756919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.762966] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e3c320-111d-4381-b4ce-2df5034505a7 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.917027] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Releasing lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.999137] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773483, 'name': CreateVM_Task, 'duration_secs': 0.377513} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.000540] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1312.000973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.001169] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.004684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1312.005759] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-999bdef6-4eb8-4ed3-93be-9d5b6b77cbf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.010905] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1312.010905] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b" [ 1312.010905] env[63371]: _type = "Task" [ 1312.010905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.020826] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.053965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.057556] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1312.058958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.875s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.060700] env[63371]: INFO nova.compute.claims [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.118205] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496473} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.119072] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1312.119072] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1312.119072] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d942e14-ca95-4078-a447-cec723ecc188 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.130210] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de, 'name': SearchDatastore_Task, 'duration_secs': 0.016445} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.132269] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1312.132269] env[63371]: value = "task-1773485" [ 1312.132269] env[63371]: _type = "Task" [ 1312.132269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.132549] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1885803f-75a0-406a-aafd-5c4d25c1b0e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.141627] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1312.141627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e37516-7277-358c-748f-e92b45364aba" [ 1312.141627] env[63371]: _type = "Task" [ 1312.141627] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.155799] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e37516-7277-358c-748f-e92b45364aba, 'name': SearchDatastore_Task, 'duration_secs': 0.009201} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.156354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.156812] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.157175] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.157415] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1312.157690] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5175ed83-1d9c-4ce6-921d-285c14ad7ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.160646] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6e6a57a-0ddf-4808-9151-beb90f4d3823 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.166821] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1312.166821] env[63371]: value = "task-1773486" [ 1312.166821] env[63371]: _type = "Task" [ 1312.166821] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.172698] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1312.173024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1312.174129] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5155e39-c1a9-49cc-99f7-c670da405fe7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.183227] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.187215] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1312.187215] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253" [ 1312.187215] env[63371]: _type = "Task" [ 1312.187215] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.194950] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.200654] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.201082] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.201248] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.201485] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.201661] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.201843] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] No waiting events found dispatching network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1312.202058] env[63371]: WARNING nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received unexpected event network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 for instance with vm_state building and task_state spawning. [ 1312.202367] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-changed-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.202585] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Refreshing instance network info cache due to event network-changed-8905eb18-7130-4195-b35c-38e03dd31b91. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1312.202792] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.202931] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.203130] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Refreshing network info cache for port 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1312.326529] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1312.523509] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b, 'name': SearchDatastore_Task, 'duration_secs': 0.053215} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.523753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.524058] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1312.524248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.567758] env[63371]: DEBUG nova.compute.utils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.575477] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1312.575477] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1312.649543] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078019} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.649826] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1312.653204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abfd05b-9ccd-4ec9-b434-358832ba10aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.691848] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1312.693581] env[63371]: DEBUG nova.policy [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '919325b57d54429a9bb73f64cd086373', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e7f96aff7d240469616d256291f7081', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1312.695923] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.699828] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c6ea92f-3d05-4d6d-bb3d-b2d5b6209575 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.739204] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.742870] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1312.742870] env[63371]: value = "task-1773487" [ 1312.742870] env[63371]: _type = "Task" [ 1312.742870] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.752380] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253, 'name': SearchDatastore_Task, 'duration_secs': 0.00871} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.753551] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20713a8-f594-4493-b33f-e146f5e0bba1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.761710] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.765040] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1312.765040] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7" [ 1312.765040] env[63371]: _type = "Task" [ 1312.765040] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.775586] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.875823] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.876106] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.876285] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.876458] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.876597] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] No waiting events found dispatching network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1312.876863] env[63371]: WARNING nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received unexpected event network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b for instance with vm_state building and task_state spawning. [ 1312.878024] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-changed-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.878311] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Refreshing instance network info cache due to event network-changed-969cd918-b804-4635-a828-8235c720e31b. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1312.878963] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquiring lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.076176] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1313.186988] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.224128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.224128] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance network_info: |[{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1313.225588] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquired lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.225588] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 
req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Refreshing network info cache for port 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.225714] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:ad:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '969cd918-b804-4635-a828-8235c720e31b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1313.239310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating folder: Project (28cc236260a947899c5e09bca25f7360). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.243066] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-301ae52a-c1cb-4fba-bff3-d3469bee6735 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.255036] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773487, 'name': ReconfigVM_Task, 'duration_secs': 0.374237} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.258915] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Reconfigured VM instance instance-00000001 to attach disk [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.259678] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created folder: Project (28cc236260a947899c5e09bca25f7360) in parent group-v368199. [ 1313.259891] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating folder: Instances. Parent ref: group-v368215. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.262280] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce3087ec-f1ee-44c9-9f15-d3608093e784 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.264045] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d48b7f50-0634-47d2-bd35-0c6fac19b09c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.273097] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1313.273097] env[63371]: value = "task-1773493" [ 1313.273097] env[63371]: _type = "Task" [ 1313.273097] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.279713] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7, 'name': SearchDatastore_Task, 'duration_secs': 0.020036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.281369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.281623] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1313.281882] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created folder: Instances in parent group-v368215. [ 1313.282102] env[63371]: DEBUG oslo.service.loopingcall [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1313.284578] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.284767] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1313.284974] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e879e347-77f6-4f0f-a378-6059c88fe9e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.286811] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1313.292108] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa098e64-8b7b-4912-81f9-c49fe26d966f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.292422] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f486bc8-38c7-4570-8fc9-cf9bf6bb81a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.309318] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.320237] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1313.320237] env[63371]: value = "task-1773495" [ 1313.320237] env[63371]: _type = "Task" [ 1313.320237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.320237] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1313.320237] env[63371]: value = "task-1773496" [ 1313.320237] env[63371]: _type = "Task" [ 1313.320237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.330440] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.335863] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.352165] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f24f5a0-8bca-4972-a0ed-f5d8593f16b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.359407] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91763cbf-a9d6-4623-aec9-584ac0fabdd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.394448] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b63d2d8-a0cd-47cb-ab02-417da021e010 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.401065] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af63a8d-11ae-4b00-9ca9-90c50f909358 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.417144] env[63371]: DEBUG nova.compute.provider_tree [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.441032] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1313.441128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1313.441890] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d36fc16-ec9e-4f1e-ab1a-ab6f23823af5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.447980] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1313.447980] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207" [ 1313.447980] env[63371]: _type = "Task" [ 1313.447980] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.458073] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.659731] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updated VIF entry in instance network info cache for port 8905eb18-7130-4195-b35c-38e03dd31b91. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.659731] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.690190] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.784781] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.833665] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.836786] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.858974] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Successfully created port: 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1313.921123] env[63371]: DEBUG nova.scheduler.client.report [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1313.967131] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207, 'name': SearchDatastore_Task, 'duration_secs': 0.154322} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.967131] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b8890b-ebe2-4206-89a6-250b9946ff8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.973460] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1313.973460] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5" [ 1313.973460] env[63371]: _type = "Task" [ 1313.973460] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.982965] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.097103] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1314.128580] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1314.128822] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1314.129038] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1314.129384] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1314.129586] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1314.129745] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1314.130059] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1314.130361] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1314.130552] 
env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1314.130734] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1314.130946] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1314.132240] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6add7197-36cf-4ca9-8bac-1b5b808ac8d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.145419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659c17b4-8f18-4dd4-ac87-b00082902677 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.162116] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.162978] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1314.162978] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.163257] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.163746] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.164095] env[63371]: DEBUG nova.compute.manager 
[req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] No waiting events found dispatching network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1314.164956] env[63371]: WARNING nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received unexpected event network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 for instance with vm_state building and task_state spawning. [ 1314.164956] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-changed-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1314.164956] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Refreshing instance network info cache due to event network-changed-eb0a9632-9bb3-4855-8ad5-af6c7a628900. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1314.164956] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.165323] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.165544] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Refreshing network info cache for port eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1314.195486] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.561322} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.195682] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.196310] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.196310] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1682dd79-5a53-4da5-a717-824805eea1ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.205148] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1314.205148] env[63371]: value = "task-1773497" [ 1314.205148] env[63371]: _type = "Task" [ 1314.205148] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.219116] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.220217] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Successfully updated port: 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.284362] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.311505] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updated VIF entry in instance network info cache for port 969cd918-b804-4635-a828-8235c720e31b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1314.311898] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.333582] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.341291] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.428604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.429200] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1314.431968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.642s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.436295] env[63371]: INFO nova.compute.claims [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1314.490429] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5, 'name': SearchDatastore_Task, 'duration_secs': 0.069944} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.491095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.494542] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1314.494542] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e92d115-110a-4401-8f93-c4a828607479 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.501181] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1314.501181] env[63371]: value = "task-1773498" [ 1314.501181] env[63371]: _type = "Task" [ 1314.501181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.511957] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.716746] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.724288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.724288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.724288] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.787254] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task, 'duration_secs': 1.142131} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.790011] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.790528] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e3502c6-7c5c-4f2c-ae5f-7301b87360d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.796551] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1314.796551] env[63371]: value = "task-1773499" [ 1314.796551] env[63371]: _type = "Task" [ 1314.796551] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.806659] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.814919] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Releasing lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.832769] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.415706} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.836282] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.836282] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.836282] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7985609b-4781-458e-9e04-130a6c48afbf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.839882] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task, 'duration_secs': 1.489722} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.840368] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.841784] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.842030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.842337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1314.842588] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0018b442-8b4e-4f8c-ab33-f9d04b44e4f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.845381] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1314.845381] env[63371]: value = "task-1773500" [ 1314.845381] env[63371]: _type = "Task" [ 1314.845381] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.851492] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1314.851492] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5255aac8-6cbb-f376-aef9-39e8a919977c" [ 1314.851492] env[63371]: _type = "Task" [ 1314.851492] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.862304] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.871877] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5255aac8-6cbb-f376-aef9-39e8a919977c, 'name': SearchDatastore_Task, 'duration_secs': 0.012308} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.871877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.871877] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1314.871877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.872200] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.872200] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1314.872200] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e06c34a8-afee-4e77-bfa8-67068edc5bb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.881433] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.882683] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.882683] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9294025-dc5d-4c7a-b072-5ce2e8057cb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.888918] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1314.888918] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3" [ 1314.888918] env[63371]: _type = "Task" [ 1314.888918] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.899212] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.942235] env[63371]: DEBUG nova.compute.utils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1314.949781] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1314.949781] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1315.018642] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.098772] env[63371]: DEBUG nova.policy [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c56ea345388e4739ae655edfa839c305', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c99d37d52edb40f99efb471da50f5845', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1315.137501] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updated VIF entry in instance network info cache for port eb0a9632-9bb3-4855-8ad5-af6c7a628900. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1315.137501] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.217098] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.562529} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.217382] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1315.218225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f3be6-5551-4c4e-acb7-17abca81522c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.244378] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.244742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86edc88a-6525-4ed9-82ec-d188473f528d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.268993] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1315.268993] env[63371]: value = "task-1773501" [ 1315.268993] env[63371]: _type = "Task" [ 1315.268993] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.279032] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.306793] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.326399] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.365630] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091117} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.365630] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1315.366262] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e419c82-6260-427c-84c9-81741868e445 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.409915] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.412435] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6da92b2c-1281-4140-b678-f9f49f335d8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.432374] env[63371]: DEBUG nova.compute.manager [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1315.432662] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.432939] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.433186] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.433780] env[63371]: DEBUG nova.compute.manager [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] No waiting events found dispatching network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1315.433780] env[63371]: WARNING nova.compute.manager 
[req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received unexpected event network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 for instance with vm_state building and task_state spawning. [ 1315.439776] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1315.439776] env[63371]: value = "task-1773502" [ 1315.439776] env[63371]: _type = "Task" [ 1315.439776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.448187] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3, 'name': SearchDatastore_Task, 'duration_secs': 0.012} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.450990] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1315.453933] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c70af115-9180-412e-ab39-caaa67f7ab27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.465773] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.475548] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1315.475548] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a" [ 1315.475548] env[63371]: _type = "Task" [ 1315.475548] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.488539] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.523054] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.640589] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.682097] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Successfully created port: a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1315.705912] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.706430] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.762785] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9f0a41-f215-4208-9ee9-6f3220211dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.775267] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911d1336-02eb-4475-bbe6-c5cfc58b8d41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.783463] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773501, 'name': ReconfigVM_Task, 'duration_secs': 0.508828} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.810528] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Reconfigured VM instance instance-00000002 to attach disk [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1315.811785] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b39159c2-2ac8-45b6-bb1e-4eeb09861dd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.816789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01609b7-ca11-4d2e-b993-7338b9a6523f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.828172] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task, 'duration_secs': 0.510147} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.829741] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.830437] env[63371]: INFO nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 15.72 seconds to spawn the instance on the hypervisor. [ 1315.830725] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1315.831213] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1315.831213] env[63371]: value = "task-1773503" [ 1315.831213] env[63371]: _type = "Task" [ 1315.831213] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.832287] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b918e-d2ef-4b14-98d9-fb675d761842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.837330] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40facd7d-18fd-4b1c-8058-0d9e8583b717 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.861624] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.863146] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773503, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.967405] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.970461] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.993518] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a, 'name': SearchDatastore_Task, 'duration_secs': 0.067625} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.993811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.994110] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1315.994393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3071d6d-c8f8-496d-8b44-ef41c7f89c05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.008773] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1316.008773] env[63371]: value = "task-1773504" [ 1316.008773] env[63371]: _type = "Task" [ 1316.008773] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.035991] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.09842} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.039966] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1316.040267] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1316.040545] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.040760] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1569155d-6fec-4efd-a9ab-1049caf03170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.048283] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1316.048283] env[63371]: value = "task-1773505" [ 1316.048283] env[63371]: _type = "Task" [ 1316.048283] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.057671] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.350677] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773503, 'name': Rename_Task, 'duration_secs': 0.231559} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.350959] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.351225] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0759fa23-7dfd-4e8b-a33e-a91a604779b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.358717] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1316.358717] env[63371]: value = "task-1773507" [ 1316.358717] env[63371]: _type = "Task" [ 1316.358717] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.375509] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.379034] env[63371]: INFO nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 20.55 seconds to build instance. 
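The repeated "Waiting for the task: (returnval){ value = ... _type = "Task" }" blocks and the "progress is N%" entries above are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task). A minimal sketch of that pattern, assuming a placeholder vCenter address, credentials, and vm_ref (none of these values come from this log):

```python
# Sketch only: illustrates the wait_for_task polling seen in the entries above.
# Host, credentials, and vm_ref are placeholders (assumptions).
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',               # vCenter host (placeholder)
    'administrator@vsphere.local',   # username (placeholder)
    'secret',                        # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)          # polling interval used by _poll_task

vm_ref = None  # placeholder: a VirtualMachine moref looked up elsewhere

# Start a long-running vCenter operation; the SOAP call returns a Task moref.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task, logging "progress is N%" as it goes,
# and returns the TaskInfo on success or raises on error/cancel.
task_info = session.wait_for_task(task_ref)
print(task_info.state)
```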
[ 1316.396354] env[63371]: ERROR nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-10f1947e-396d-43ed-9d12-a2fbd1678a9d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-10f1947e-396d-43ed-9d12-a2fbd1678a9d"}]} [ 1316.421508] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1316.438687] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1316.438973] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1316.456723] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task, 'duration_secs': 0.544916} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.456723] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.457390] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1530288-b322-4c2e-a2dd-6eb99be72fc3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.465075] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1316.465075] env[63371]: value = "task-1773508" [ 1316.465075] env[63371]: _type = "Task" [ 1316.465075] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.467198] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1316.473537] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1316.476096] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.476441] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance network_info: |[{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1316.482601] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:47:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c410064-2e43-498a-bc47-de2e9ed224f0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1316.494422] env[63371]: DEBUG oslo.service.loopingcall [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1316.497744] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Successfully updated port: 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1316.497744] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1316.497744] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-262aa6e6-7e1e-4e3c-9152-c309aa805323 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.520458] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773508, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.522162] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1316.531556] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1316.531556] env[63371]: value = "task-1773509" [ 1316.531556] env[63371]: _type = "Task" [ 1316.531556] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.536023] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1316.537682] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1316.537682] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1316.537891] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1316.537968] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1316.538366] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1316.540000] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.540000] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a881a3-26a4-4e1d-96e0-46ecab7cc855 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.557192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dc2327-1b06-4cb6-bce0-8899e093c89f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.562542] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.571647] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.786589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69f4768-e7d7-45ae-9a92-7ebc24c220ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.800847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00bb075-cd39-4342-93d6-56df6336e36b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.834834] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61539bb-54df-42c7-bba0-3017e55d70a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.846030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8669a5b9-b1d8-4e33-aec0-de9b81df5756 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.862392] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1316.877688] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.881475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.062s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.983994] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773508, 'name': Rename_Task, 'duration_secs': 0.221501} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.983994] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.985454] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1cbc91a-d5dc-4404-a341-94754d0f43cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.990905] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1316.990905] env[63371]: value = "task-1773510" [ 1316.990905] env[63371]: _type = "Task" [ 1316.990905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.007026] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.015928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.016105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.016424] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1317.032572] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.054022] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.062374] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.634072} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.062374] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.062981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c4b8b5-c4a5-46e6-b69d-4d5b81bc59d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.093904] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.094262] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bcbd397-c7b4-47b2-8ea2-d231b41a26ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.119877] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1317.119877] env[63371]: value = "task-1773511" [ 1317.119877] env[63371]: _type = "Task" [ 1317.119877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.132820] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.383051] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.385913] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1317.424250] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 18 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1317.424250] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 18 to 19 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1317.424600] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.508744] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.535376] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.205546} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.535854] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.535854] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.536927] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f96db72-167b-4c66-a22a-bae963c5229d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.545521] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1317.545521] env[63371]: value = "task-1773512" [ 1317.545521] env[63371]: _type = "Task" [ 1317.545521] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.555489] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task, 'duration_secs': 0.610202} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.555489] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1317.555489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.555657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.556251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1317.559568] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16793177-b2db-40c3-a5ea-43c3018cca52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.561432] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773512, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.565455] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1317.565455] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b" [ 1317.565455] env[63371]: _type = "Task" [ 1317.565455] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.575526] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.628639] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1317.641480] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.886648] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.928240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.929371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.497s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.929449] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1317.935035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.685s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.936400] env[63371]: INFO nova.compute.claims [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1318.005755] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task, 'duration_secs': 0.88435} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.006731] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.007029] env[63371]: INFO nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 13.42 seconds to spawn the instance on the hypervisor. [ 1318.007266] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1318.008574] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc0a7e8-cff7-4a77-848e-37150d06e344 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.035555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.035555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.060352] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073724} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.060352] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1318.060781] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c841bfcd-140b-47dd-88c1-8d4a24ddc4ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.088071] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.093209] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09f245f1-3a82-4b7a-915b-074e68f82680 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.118773] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.011057} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.119395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.119632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1318.119880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.119996] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.120193] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.120527] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.120527] env[63371]: value = "task-1773513" [ 1318.120527] env[63371]: _type = "Task" [ 1318.120527] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.120735] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40f91fc1-b6c9-4a68-838d-0070cb180b91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.143384] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.143384] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.143384] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1318.144023] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773513, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.144946] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-854dc114-1807-4ebb-b827-14fdc8467d0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.155519] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.155519] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533" [ 1318.155519] env[63371]: _type = "Task" [ 1318.155519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.165611] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.344774] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.377732] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task, 'duration_secs': 1.868903} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.378196] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.378555] env[63371]: INFO nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 16.05 seconds to spawn the instance on the hypervisor. 
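The "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" records in this stretch all come from oslo.vmware's task polling (wait_for_task at api.py:397 and _poll_task at api.py:434/444), which drives each vCenter task handle (ReconfigVM_Task, SearchDatastore_Task, PowerOnVM_Task, ...) through a fixed-interval loop (the loopingcall.py:435 frames that appear further down). The following is a minimal, illustrative sketch of that polling pattern only, not oslo.vmware's actual implementation: the _FakeTask class, its poll() method and the exact log wording are invented for this example, and it assumes oslo.service is installed.

```python
# Illustrative sketch of the fixed-interval task polling traced above.
# _FakeTask is a stand-in for a vCenter task handle, NOT oslo.vmware's real object.
import itertools
import logging

from oslo_service import loopingcall

logging.basicConfig(level=logging.DEBUG, format="%(message)s")
LOG = logging.getLogger(__name__)


class _FakeTask:
    """Stand-in task: reports increasing progress, then success."""

    def __init__(self, task_id):
        self.task_id = task_id
        self._progress = itertools.count(start=0, step=33)

    def poll(self):
        progress = next(self._progress)
        return ("success", 100) if progress >= 100 else ("running", progress)


def wait_for_task(task, interval=0.5):
    """Poll `task` on a fixed interval until it finishes, logging progress."""

    def _poll():
        state, progress = task.poll()
        if state == "running":
            LOG.debug("Task: %s progress is %d%%.", task.task_id, progress)
            return  # returning None keeps the loop running
        LOG.debug("Task: %s completed successfully.", task.task_id)
        raise loopingcall.LoopingCallDone(state)  # stops the loop, returns state

    LOG.debug("Waiting for the task: %s to complete.", task.task_id)
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()


if __name__ == "__main__":
    wait_for_task(_FakeTask("task-1773513"))
```

Run standalone, this emits a few "progress is N%" lines followed by "completed successfully", which is the same shape as the task-1773513 / task-1773515 / task-1773516 sequences traced above and below.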
[ 1318.378796] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1318.379700] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024dfc76-e072-4ea4-aa92-84f998fff92c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.448192] env[63371]: DEBUG nova.compute.utils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1318.451464] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1318.451939] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1318.553094] env[63371]: INFO nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 20.32 seconds to build instance. [ 1318.637530] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task, 'duration_secs': 1.276785} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.641597] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Reconfigured VM instance instance-00000004 to attach disk [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.642177] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773513, 'name': ReconfigVM_Task, 'duration_secs': 0.346032} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.643709] env[63371]: DEBUG nova.policy [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6aa709a53564231ac25fb3e878239ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76a64c712ca4aa98c19600ef0469855', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1318.647724] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4b71038-136f-4f88-8d2e-e773c92d7e46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.647724] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Reconfigured VM instance instance-00000005 to attach disk [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.649182] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64ccedd5-35ed-4c22-8ce6-163317863c19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.656870] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1318.656870] env[63371]: value = "task-1773515" [ 1318.656870] env[63371]: _type = "Task" [ 1318.656870] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.663753] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.663753] env[63371]: value = "task-1773516" [ 1318.663753] env[63371]: _type = "Task" [ 1318.663753] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.676035] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533, 'name': SearchDatastore_Task, 'duration_secs': 0.019331} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.677148] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98242fc-28fc-49fb-b348-a815b9e051dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.686403] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773516, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.686655] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773515, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.690568] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.690568] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce" [ 1318.690568] env[63371]: _type = "Task" [ 1318.690568] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.705192] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.814310] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Successfully updated port: a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.848239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.848561] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance network_info: |[{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1318.850122] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:f3:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11032cc2-b275-48d2-9c40-9455ea7d49e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a4b63df-9697-47a1-81ad-c69476a80975', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1318.861414] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating folder: Project (6e7f96aff7d240469616d256291f7081). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1318.862201] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-711e41d2-aa41-417b-bc04-eb2b2ce0e3ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.874797] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created folder: Project (6e7f96aff7d240469616d256291f7081) in parent group-v368199. [ 1318.876045] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating folder: Instances. Parent ref: group-v368219. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1318.876419] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8b129e8-0f11-4bb5-93ac-2173db6c1851 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.901034] env[63371]: INFO nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 21.44 seconds to build instance. [ 1318.955028] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1319.054654] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.828s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.099164] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-changed-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.099164] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Refreshing instance network info cache due to event network-changed-6c410064-2e43-498a-bc47-de2e9ed224f0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1319.099164] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.100493] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquired lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.100493] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Refreshing network info cache for port 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.194118] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773516, 'name': Rename_Task, 'duration_secs': 0.173219} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.194448] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773515, 'name': Rename_Task, 'duration_secs': 0.173766} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.199378] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1319.203640] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1319.203990] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created folder: Instances in parent group-v368219. [ 1319.204341] env[63371]: DEBUG oslo.service.loopingcall [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.204843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5389137-eec5-4f25-a8dd-1736830c9a70 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.207131] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60373b74-ae42-4e12-a764-3d7801e2b35d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.208913] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1319.209616] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b292ccc-e2c6-4b5a-be99-62cfa844b798 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.237073] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce, 'name': SearchDatastore_Task, 'duration_secs': 0.014018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.238565] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1319.238836] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1319.239280] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-157b708d-f9fa-41f7-8c25-cca0be2baa62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.247148] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1319.247148] env[63371]: value = "task-1773519" [ 1319.247148] env[63371]: _type = "Task" [ 1319.247148] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.247819] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1319.247819] env[63371]: value = "task-1773520" [ 1319.247819] env[63371]: _type = "Task" [ 1319.247819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.270670] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1319.270670] env[63371]: value = "task-1773521" [ 1319.270670] env[63371]: _type = "Task" [ 1319.270670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1319.284698] env[63371]: value = "task-1773522" [ 1319.284698] env[63371]: _type = "Task" [ 1319.284698] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.296201] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.300873] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.317222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269326ec-3a98-4c79-ac07-310ec839ac40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.324385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.324385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.324385] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.326898] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d37f49c-9cc8-4895-bfdd-5760a2479d9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.362768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f72d96-5025-4dbf-8b0b-990776f07981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.373028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162f00fc-251b-4ca7-b91c-446408e43211 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.390268] env[63371]: DEBUG nova.compute.provider_tree [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.402572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.958s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.561594] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.623766] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Successfully created port: 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1319.766527] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.771342] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.783422] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.797746] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.894655] env[63371]: DEBUG nova.scheduler.client.report [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1319.905922] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.963506] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.972265] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1320.002179] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1320.002780] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1320.002780] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.002979] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1320.003640] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.004407] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1320.004407] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1320.004407] env[63371]: DEBUG 
nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1320.004612] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1320.004674] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1320.004856] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1320.005928] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2057de31-113d-4407-b6ec-3d7dc223b0cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.021858] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736baa00-64da-449a-94bb-4f32f0741f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.100033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.281662] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task, 'duration_secs': 0.704688} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.281662] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task, 'duration_secs': 0.701654} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.281662] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.281662] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 10.97 seconds to spawn the instance on the hypervisor. [ 1320.281662] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.281921] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.281921] env[63371]: INFO nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 13.33 seconds to spawn the instance on the hypervisor. [ 1320.281921] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.282951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a488976b-13df-4cda-95af-d82abfc906c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.289163] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f597473-ec22-4760-8fae-e0fd7c1e61b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.303570] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662567} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.312031] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1320.312031] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1320.317314] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a00b80a-ba43-4d25-8fba-a40ed58729af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.321020] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task, 'duration_secs': 0.717645} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.321020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1320.321020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.321020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.321302] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1320.321544] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6147d35d-50c8-49f2-a7bd-7034ea2e3003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.327238] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1320.327238] env[63371]: value = "task-1773524" [ 
1320.327238] env[63371]: _type = "Task" [ 1320.327238] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.329579] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1320.329579] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8" [ 1320.329579] env[63371]: _type = "Task" [ 1320.329579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.337065] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773524, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.345128] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.404018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.404018] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1320.406406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.478s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.408626] env[63371]: INFO nova.compute.claims [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.433462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.573012] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updated VIF entry in instance network info cache for port 6c410064-2e43-498a-bc47-de2e9ed224f0. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1320.573286] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.647529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.647781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.648013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.648456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.648456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.655703] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1320.657519] env[63371]: INFO nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Terminating instance [ 1320.662336] env[63371]: DEBUG nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1320.662336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1320.662714] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d34f1be-4205-4836-8d3c-cd260c4ce37a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.677483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.679279] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58714c54-4fab-44cc-9fb5-74a6a3d45963 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.687408] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for the task: (returnval){ [ 1320.687408] env[63371]: value = "task-1773525" [ 1320.687408] env[63371]: _type = "Task" [ 1320.687408] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.699228] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773525, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.838333] env[63371]: INFO nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 21.86 seconds to build instance. [ 1320.857555] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 21.52 seconds to build instance. 
[ 1320.871060] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015082} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.871060] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116525} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.872261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.872531] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1320.872773] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.873126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.873126] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1320.873446] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1320.874073] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a47e442-3408-4a2c-8c40-de5f1f5f35f6 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.878768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca90e3d3-c4fa-4a4e-a7b6-9bb4fcfe4e75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.918613] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1320.920364] env[63371]: DEBUG nova.compute.utils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1320.927563] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fec48d7-afa1-4b4e-88e8-531f9f74d187 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.946577] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1320.947046] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1320.954596] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1320.957930] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1320.958314] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1320.962102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e04d798b-2c45-4a47-890c-d3dff6829115 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.971065] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1320.971065] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036" [ 1320.971065] env[63371]: _type = "Task" [ 1320.971065] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.971065] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1320.971065] env[63371]: value = "task-1773526" [ 1320.971065] env[63371]: _type = "Task" [ 1320.971065] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.983747] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.988840] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.060630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.060862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Releasing lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.080831] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] No waiting events found dispatching network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1321.081503] env[63371]: WARNING nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received unexpected event 
network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 for instance with vm_state building and task_state spawning. [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing instance network info cache due to event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1321.081636] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.081670] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.082743] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1321.112035] env[63371]: DEBUG nova.compute.manager [None req-8a79505d-7d85-432e-bd2a-761f17d60453 tempest-ServerDiagnosticsTest-1087031006 tempest-ServerDiagnosticsTest-1087031006-project-admin] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1321.112035] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd46880a-84b6-4020-a059-7cacd06dbf28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.117703] env[63371]: INFO nova.compute.manager [None req-8a79505d-7d85-432e-bd2a-761f17d60453 tempest-ServerDiagnosticsTest-1087031006 tempest-ServerDiagnosticsTest-1087031006-project-admin] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Retrieving diagnostics [ 1321.119373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f94c5f0-ab3c-422e-ab4d-026e1e9ba5b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.169736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.169949] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb 
tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance network_info: |[{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1321.170435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:68:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2807b8c-5895-474a-9c75-58bd21982409', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1321.178237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating folder: Project (c99d37d52edb40f99efb471da50f5845). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1321.179917] env[63371]: DEBUG nova.policy [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5bee716ea542f9a463941fa477a897', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d19f4772ff46d3b3024851822cf833', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1321.184573] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caf9924d-3c96-460b-9961-c07247672990 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.196182] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created folder: Project (c99d37d52edb40f99efb471da50f5845) in parent group-v368199. [ 1321.196233] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating folder: Instances. Parent ref: group-v368222. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1321.196933] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc726227-2570-40aa-9505-f6a0f639681b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.205144] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773525, 'name': PowerOffVM_Task, 'duration_secs': 0.299199} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.206078] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.206161] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1321.206413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e62cb396-1718-4cce-8b53-5ec803fcd5b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.215913] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created folder: Instances in parent group-v368222. [ 1321.215913] env[63371]: DEBUG oslo.service.loopingcall [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.215913] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1321.216085] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0216c982-e174-4f87-9676-850002796b99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.238993] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1321.238993] env[63371]: value = "task-1773530" [ 1321.238993] env[63371]: _type = "Task" [ 1321.238993] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.247582] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773530, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleting the datastore file [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1321.286800] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cee4e9e0-aceb-4e19-b3ac-d30d1b97fa62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.297430] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for the task: (returnval){ [ 1321.297430] env[63371]: value = "task-1773531" [ 1321.297430] env[63371]: _type = "Task" [ 1321.297430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.307073] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773531, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.353624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.388s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.354483] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37631511-f400-495d-90e3-c34dd5440d8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.365316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd62b276-f7be-4cc0-a687-25ff759144f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.400422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.072s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.403602] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc57257f-c049-4d3d-b2c0-fb8d50a253f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.413416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5226efc-1bd5-4ab7-a335-4c8969954ed9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.442883] env[63371]: DEBUG nova.compute.provider_tree [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.496574] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036, 'name': SearchDatastore_Task, 'duration_secs': 0.013008} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.499500] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.499796] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-711f7c5a-997f-4954-8437-f99429b4b5a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.510313] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1321.510313] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061" [ 1321.510313] env[63371]: _type = "Task" [ 1321.510313] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.520728] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.750757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773530, 'name': CreateVM_Task, 'duration_secs': 0.416363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.751060] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1321.751726] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.751985] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.752336] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1321.752614] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-065e399e-6e58-447d-8356-a41965f39051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.757366] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting 
for the task: (returnval){ [ 1321.757366] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114" [ 1321.757366] env[63371]: _type = "Task" [ 1321.757366] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.767449] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.809077] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248108} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.809576] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.809962] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1321.810392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1321.810753] env[63371]: INFO nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1321.811170] env[63371]: DEBUG oslo.service.loopingcall [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.811520] env[63371]: DEBUG nova.compute.manager [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1321.811776] env[63371]: DEBUG nova.network.neutron [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1321.860249] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1321.910832] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1321.947504] env[63371]: DEBUG nova.scheduler.client.report [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.972969] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1321.978386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.978606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.988847] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task, 'duration_secs': 0.601484} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.993707] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Reconfigured VM instance instance-00000006 to attach disk [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1321.993707] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ac3b867-6cc1-436e-a7e3-2a7390d7f8e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.998750] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1321.998750] env[63371]: value = "task-1773532" [ 1321.998750] env[63371]: _type = "Task" [ 1321.998750] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.013685] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773532, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.015901] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1322.016223] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1322.016395] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1322.016580] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1322.016724] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1322.016927] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1322.017198] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1322.017339] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1322.017650] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1322.018055] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1322.018257] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1322.022548] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df748729-8d12-458e-aaa2-1e88619c0702 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.036728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89136a50-ec0d-4a80-88d4-2130a2c3999b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.041642] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061, 'name': SearchDatastore_Task, 'duration_secs': 0.01192} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.041899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.043686] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1322.043686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1531be4e-cd78-4f38-b2ca-a64b4a472f18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.058557] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1322.058557] env[63371]: value = "task-1773533" [ 1322.058557] env[63371]: _type = "Task" [ 1322.058557] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.068860] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.276078] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114, 'name': SearchDatastore_Task, 'duration_secs': 0.035074} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.276644] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.276727] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.277034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.277140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.277318] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1322.277598] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eabd07d2-d810-4a81-aa8e-1f207c32d698 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.287584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1322.287780] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1322.288824] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c601eb-5e37-4fb4-ab26-f40428a05cf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.295506] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1322.295506] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873" [ 1322.295506] env[63371]: _type = "Task" [ 1322.295506] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.305031] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.396404] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.445935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.454492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.048s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.455040] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1322.459269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.360s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.460292] env[63371]: INFO nova.compute.claims [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.515444] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773532, 'name': Rename_Task, 'duration_secs': 0.213494} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.518036] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1322.518036] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baccbaa9-8d3c-4ad5-a737-40f2d4f9c84d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.530904] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1322.530904] env[63371]: value = "task-1773535" [ 1322.530904] env[63371]: _type = "Task" [ 1322.530904] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.547617] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.574512] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.754213] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated VIF entry in instance network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1322.754768] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.771697] env[63371]: DEBUG nova.network.neutron [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.811176] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873, 'name': SearchDatastore_Task, 'duration_secs': 0.017467} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.812438] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0689f42a-ec50-47e7-bf7c-36c4a12c23fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.820438] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1322.820438] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da" [ 1322.820438] env[63371]: _type = "Task" [ 1322.820438] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.832735] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.965100] env[63371]: DEBUG nova.compute.utils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.973421] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Successfully created port: d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.977001] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1322.977001] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1323.047353] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.076216] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758741} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.076778] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1323.076778] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1323.077169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8203562-1cfd-4620-b210-d6601baa2413 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.086854] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1323.086854] env[63371]: value = "task-1773536" [ 1323.086854] env[63371]: _type = "Task" [ 1323.086854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.107501] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.206869] env[63371]: DEBUG nova.policy [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3dec49b67cd49159192b5c2756fc2e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f2fde472b14ab9a4d20947ca714191', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1323.259642] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.277302] env[63371]: INFO nova.compute.manager [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 1.47 seconds to deallocate network for instance. 
[ 1323.334026] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da, 'name': SearchDatastore_Task, 'duration_secs': 0.011107} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.334026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.334026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1323.336079] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.336577] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.337121] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.337396] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.337694] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] No waiting events found dispatching network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1323.337963] env[63371]: WARNING nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed 
service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received unexpected event network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 for instance with vm_state building and task_state spawning. [ 1323.338569] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-changed-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.339376] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing instance network info cache due to event network-changed-a2807b8c-5895-474a-9c75-58bd21982409. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1323.339376] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.339902] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.340198] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing network info cache for port a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.343017] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60988934-5b4b-4b08-82c7-abde65c21d4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.351592] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1323.351592] env[63371]: value = "task-1773537" [ 1323.351592] env[63371]: _type = "Task" [ 1323.351592] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.363289] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.474959] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Successfully updated port: 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1323.476983] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1323.547057] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.604251] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.200019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.611108] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1323.611492] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd1fc60-54f4-4915-91c3-16c49efcdcee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.641278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1323.643964] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee86346d-986b-4e5c-8188-835f3a2eb012 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.674942] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1323.674942] env[63371]: value = "task-1773538" [ 1323.674942] env[63371]: _type = "Task" [ 1323.674942] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.695529] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.741442] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.741873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.788701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.873631] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.931306] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434070a5-0654-4deb-bca1-18adf2d16e59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.944380] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c3386f-676a-4f6b-bb64-dce8846172f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.983883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.984057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.984195] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.987052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13f024c-020c-45f6-9f91-93db96d92530 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.000860] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d286aba1-6d15-4590-b868-b682a880cf78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.022168] env[63371]: DEBUG nova.compute.provider_tree [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1324.045576] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task, 'duration_secs': 1.181002} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.045860] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1324.046079] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 12.34 seconds to spawn the instance on the hypervisor. [ 1324.046263] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1324.047088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59a3b1b-053d-43db-9dd8-55f351e32061 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.184576] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.214631] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Successfully created port: 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1324.372493] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.878983} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.372858] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1324.373114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1324.373509] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f61ca92c-979c-4691-ac81-8b3ae4e94d61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.380601] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1324.380601] env[63371]: value = "task-1773539" [ 1324.380601] env[63371]: _type = "Task" [ 1324.380601] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.389371] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.503551] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1324.527754] env[63371]: DEBUG nova.scheduler.client.report [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1324.537768] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1324.538745] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1324.538745] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1324.538965] env[63371]: DEBUG 
nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1324.540036] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1324.540270] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1324.540467] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1324.540669] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1324.541612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7753c05c-58cf-40e7-896f-efc0416dc3cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.545962] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.559461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6c300d-0a35-49e7-9ff0-03a037138ad1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.586035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.586286] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.586508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.586676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.586833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.591342] env[63371]: INFO nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Terminating instance [ 1324.592682] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 25.20 seconds to build instance. [ 1324.595308] env[63371]: DEBUG nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1324.595531] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.599104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5174f20-05fc-4ab4-bb6b-4e78716d234c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.608486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.609176] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49ede7d8-11a6-4806-9bbd-c91a9221565f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.616929] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1324.616929] env[63371]: value = "task-1773540" [ 1324.616929] env[63371]: _type = "Task" [ 1324.616929] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.626080] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.686545] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.745307] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updated VIF entry in instance network info cache for port a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.745708] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.890915] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073033} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.893853] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1324.893853] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd42433-4a47-4bde-8a1c-857f522624a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.898169] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.920243] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1324.921313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.921619] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance network_info: |[{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": 
"8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1324.921865] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceca282d-c3d6-4733-a1c6-0e9d32919382 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.941642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:29:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1324.949480] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating folder: Project (c76a64c712ca4aa98c19600ef0469855). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.950235] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-705c0b29-324c-420d-aebc-0bd636dad1fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.959102] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1324.959102] env[63371]: value = "task-1773541" [ 1324.959102] env[63371]: _type = "Task" [ 1324.959102] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.964102] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created folder: Project (c76a64c712ca4aa98c19600ef0469855) in parent group-v368199. 
[ 1324.964102] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating folder: Instances. Parent ref: group-v368226. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.964102] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09496150-cd8c-4c2d-95a3-73e77d489a27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.971844] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.972812] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created folder: Instances in parent group-v368226. [ 1324.973087] env[63371]: DEBUG oslo.service.loopingcall [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.973287] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1324.973418] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71b04bdc-0c19-4634-81aa-a09d166e8405 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.995019] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1324.995019] env[63371]: value = "task-1773544" [ 1324.995019] env[63371]: _type = "Task" [ 1324.995019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.004459] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773544, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.032775] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.033547] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1325.037575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.604s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.040653] env[63371]: INFO nova.compute.claims [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1325.090597] env[63371]: DEBUG nova.compute.manager [req-ec4c8eca-3d79-4f66-b7ac-c3803439d849 req-1ec8a08e-d0b0-4aaa-8293-251ff9be0c69 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-vif-deleted-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1325.102817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.722s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.132464] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773540, 'name': PowerOffVM_Task, 'duration_secs': 0.19529} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.132728] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.132909] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1325.133189] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5419c0ca-8043-488e-8572-bb180bf2a8b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.183496] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task, 'duration_secs': 1.180942} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.183786] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Reconfigured VM instance instance-00000007 to attach disk [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.184443] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2427cd17-3a7f-42b9-8bd1-e88857b0b422 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.194300] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1325.194300] env[63371]: value = "task-1773546" [ 1325.194300] env[63371]: _type = "Task" [ 1325.194300] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.205088] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773546, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.247497] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1325.247804] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1325.248088] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleting the datastore file [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.248462] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aac21a0-476d-4de6-9a5c-8c7bd5f44aaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.252930] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.259504] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd 
tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1325.259504] env[63371]: value = "task-1773547" [ 1325.259504] env[63371]: _type = "Task" [ 1325.259504] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.277238] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.473549] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773541, 'name': ReconfigVM_Task, 'duration_secs': 0.284713} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.473549] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.473771] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e19b5131-07d8-4978-9471-835168003719 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.481104] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1325.481104] env[63371]: value = "task-1773548" [ 1325.481104] env[63371]: _type = "Task" [ 1325.481104] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.489911] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773548, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.507338] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773544, 'name': CreateVM_Task, 'duration_secs': 0.434609} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.507724] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1325.508533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.508878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.509328] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1325.510348] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a15459b-32a6-4ad8-be40-4e64331bc6eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.517284] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1325.517284] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab" [ 1325.517284] env[63371]: _type = "Task" [ 1325.517284] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.530154] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.546419] env[63371]: DEBUG nova.compute.utils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1325.551563] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1325.551563] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1325.610035] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1325.678040] env[63371]: DEBUG nova.policy [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a8a6cea5f2a4400b8f3bb15101ae129', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a9101ae72864e0b8af6c598153ff40e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1325.707224] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773546, 'name': Rename_Task, 'duration_secs': 0.226197} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.707224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.707224] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e8cceb4-f559-43b6-97af-0f8d3ff3a35c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.717145] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1325.717145] env[63371]: value = "task-1773549" [ 1325.717145] env[63371]: _type = "Task" [ 1325.717145] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.723516] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.754553] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.757140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.776582] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305787} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.776582] env[63371]: INFO nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1325.776818] env[63371]: DEBUG oslo.service.loopingcall [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.778676] env[63371]: DEBUG nova.compute.manager [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1325.778676] env[63371]: DEBUG nova.network.neutron [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1325.944837] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Successfully updated port: d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.991437] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773548, 'name': Rename_Task, 'duration_secs': 0.154979} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.992117] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.992383] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a47560e9-d27f-45e3-a409-d15098a4c3ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.999662] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1325.999662] env[63371]: value = "task-1773550" [ 1325.999662] env[63371]: _type = "Task" [ 1325.999662] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.008357] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.029688] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab, 'name': SearchDatastore_Task, 'duration_secs': 0.012046} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.030080] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.030468] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.030579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.030852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.031072] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.031373] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de629420-51e7-4175-8d7a-1f2fa4d3dc8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.039980] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.040151] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1326.041022] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a388e72-8c9e-48a6-8c6e-4530bc6dd2f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.046800] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1326.046800] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40" [ 1326.046800] env[63371]: _type = "Task" [ 1326.046800] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.050994] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1326.059644] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.136254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.226944] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.290492] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Successfully created port: 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.386349] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ec11a3-70af-49aa-bff0-c7e4d931b8a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.397034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ef1c97-6697-4dc5-b726-ae7c79e7ce7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.434422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26504e33-830d-41fb-a202-d2bf7784122d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.443123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db23a582-ee50-483b-939a-9ec36f642482 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.447939] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.448104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.448254] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.460578] env[63371]: DEBUG nova.compute.provider_tree [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.513789] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773550, 'name': PowerOnVM_Task, 'duration_secs': 0.477552} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.514196] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1326.514449] env[63371]: INFO nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 10.04 seconds to spawn the instance on the hypervisor. [ 1326.514792] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1326.519386] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35381fc-6f99-4866-a084-9563aa4860b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.562277] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40, 'name': SearchDatastore_Task, 'duration_secs': 0.008167} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.562277] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c969cfe-57e9-4f86-a4b7-619fefb445d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.573512] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1326.573512] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc" [ 1326.573512] env[63371]: _type = "Task" [ 1326.573512] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.587470] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.729336] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.845322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.845746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.964630] env[63371]: DEBUG nova.scheduler.client.report [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1327.045693] env[63371]: INFO nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 23.89 seconds to build instance. [ 1327.051477] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1327.071908] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1327.096497] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc, 'name': SearchDatastore_Task, 'duration_secs': 0.01185} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.096497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.096497] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1327.096497] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-343eb3ce-b437-4f14-b18a-8b739fd39796 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.111459] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1327.111689] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1327.111841] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 
tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1327.113489] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1327.113587] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1327.113760] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1327.114930] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb369334-830e-46de-a122-c62b21359646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.121550] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1327.121550] env[63371]: value = "task-1773552" [ 1327.121550] env[63371]: _type = "Task" [ 1327.121550] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.130936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974f4116-27a9-4e53-acb6-30014bba2ff1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.145214] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.226805] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task, 'duration_secs': 1.142406} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.227579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.227579] env[63371]: INFO nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 13.13 seconds to spawn the instance on the hypervisor. [ 1327.227579] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1327.228339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56064ba0-f53d-423b-9ddb-f4eabe832ee8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.404561] env[63371]: DEBUG nova.network.neutron [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.473088] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.473640] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1327.477794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.082s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.483099] env[63371]: INFO nova.compute.claims [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1327.548707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.407s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.602808] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Successfully updated port: 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1327.634343] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.747331] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received event network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1327.747331] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.747417] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.747599] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.747677] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] No waiting events found dispatching network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1327.747878] env[63371]: WARNING nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received unexpected event network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 for instance with vm_state building and task_state spawning. [ 1327.749035] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received event network-changed-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1327.749035] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Refreshing instance network info cache due to event network-changed-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1327.749035] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.749035] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.749296] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Refreshing network info cache for port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.754704] env[63371]: INFO nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 25.45 seconds to build instance. [ 1327.782258] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.907264] env[63371]: INFO nova.compute.manager [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 2.13 seconds to deallocate network for instance. 
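Annotation: the cache update above stores the instance's network_info as a list of VIF dictionaries (port id, MAC address, the network with its subnets and fixed IPs, the OVS details, devname, and so on). The snippet below is only a hedged reading aid: the sample dict is trimmed from the entry above, and summarize_vif is not part of Nova's network model, just one way to flatten the fields most often needed when scanning these logs.

    # Trimmed sample of one VIF dict from the cache update logged above.
    vif = {
        "id": "d1b325d0-b864-44be-8fe4-b923489752d0",
        "address": "fa:16:3e:ff:96:d6",
        "network": {
            "id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a",
            "label": "shared",
            "subnets": [
                {
                    "cidr": "192.168.233.0/24",
                    "ips": [{"address": "192.168.233.150", "type": "fixed",
                             "version": 4, "floating_ips": []}],
                }
            ],
        },
        "type": "ovs",
        "devname": "tapd1b325d0-b8",
        "active": True,
    }

    def summarize_vif(vif):
        """Illustrative helper (not part of Nova): flatten one network_info
        VIF dict into the fields most useful when reading these entries."""
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif["devname"],
            "network": vif["network"]["label"],
            "fixed_ips": fixed_ips,
            "active": vif["active"],
        }

    print(summarize_vif(vif))
    # {'port_id': 'd1b325d0-...', 'mac': 'fa:16:3e:ff:96:d6',
    #  'devname': 'tapd1b325d0-b8', 'network': 'shared',
    #  'fixed_ips': ['192.168.233.150'], 'active': True}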
[ 1327.988279] env[63371]: DEBUG nova.compute.utils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.994201] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1327.994201] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1328.055539] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1328.092088] env[63371]: DEBUG nova.policy [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '145668abb3514b8ea11c4fc6cf13d2cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f0a4db7d709461ca32a5dc0ebabdf31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1328.108232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.108232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.108232] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.136270] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.852265} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.142738] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Successfully updated port: 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.142738] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.142738] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.142738] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25a56520-1ad5-454e-87f4-f750c02204b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.149433] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1328.149433] env[63371]: value = "task-1773553" [ 1328.149433] env[63371]: _type = "Task" [ 1328.149433] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.161524] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773553, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.203707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.204065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.204406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.204653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.204882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.211725] env[63371]: INFO nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Terminating instance [ 1328.216398] env[63371]: DEBUG nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1328.216672] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1328.216992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.217299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.218212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8391b27-07e7-4e66-b56e-213d95bab83b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.226847] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.227452] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36b46288-b5c6-4d83-972d-877733323477 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.234649] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1328.234649] env[63371]: value = "task-1773554" [ 1328.234649] env[63371]: _type = "Task" [ 1328.234649] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.244731] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773554, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.258637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.971s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.286332] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.287034] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance network_info: |[{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1328.287583] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:96:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1b325d0-b864-44be-8fe4-b923489752d0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1328.296711] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating folder: Project (c9d19f4772ff46d3b3024851822cf833). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.297438] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a69aaba-9d49-47ee-b24c-a5f5e2d45e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.308859] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created folder: Project (c9d19f4772ff46d3b3024851822cf833) in parent group-v368199. [ 1328.309086] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating folder: Instances. Parent ref: group-v368229. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.309362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e00d421e-2d01-4b64-9a78-9462845fb5fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.318204] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created folder: Instances in parent group-v368229. [ 1328.318447] env[63371]: DEBUG oslo.service.loopingcall [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.318632] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1328.318835] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-021039ec-0438-46b1-bd90-a1ef511642d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.342289] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1328.342289] env[63371]: value = "task-1773557" [ 1328.342289] env[63371]: _type = "Task" [ 1328.342289] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.353979] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.415420] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.494023] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1328.597657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.603695] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Successfully created port: f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1328.627127] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updated VIF entry in instance network info cache for port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.627574] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.640841] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.641023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.641240] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.668567] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219062} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.670643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1328.671903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a473eab0-cd98-4d8b-a112-17f04d5bd333 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.698091] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1328.699143] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.704448] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4039c97e-948f-4b4f-8142-59bbc6794ab6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.723941] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1328.723941] env[63371]: value = "task-1773559" [ 1328.723941] env[63371]: _type = "Task" [ 1328.723941] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.733209] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.746352] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773554, 'name': PowerOffVM_Task, 'duration_secs': 0.386805} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.749701] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1328.749992] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1328.753849] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cc40f24-b90f-42a0-94e2-056e05ca6a8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.761594] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1328.858572] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.859110] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1328.859461] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1328.859745] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleting the datastore file [datastore1] ca202079-2eae-441e-80f6-e403497e137d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1328.860116] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ac0cf98-9d0e-4c9b-b5ef-d74fb7d28d82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.868025] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1328.868025] env[63371]: value = "task-1773561" [ 1328.868025] env[63371]: _type = "Task" [ 1328.868025] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.880013] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.964410] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08491401-1548-4e5f-b96d-5f636de10dec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.973331] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f26135a-8f68-47ea-85f9-fdce975786f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.014976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff430d88-5bd2-4914-8daa-0e80c23ac2ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.023384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7d9de1-7335-4c73-b3e2-e83964b34bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.038320] env[63371]: DEBUG nova.compute.provider_tree [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.130234] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.191662] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1329.238616] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.256219] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.294921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.362136] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task, 'duration_secs': 0.566007} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.362136] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.362430] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b77a8c6-e50b-4890-989a-15e7a803da01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.366546] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.366546] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae6ff4-7005-812d-55a5-c655621b0f43" [ 1329.366546] env[63371]: _type = "Task" [ 1329.366546] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.388886] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268186} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.389192] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae6ff4-7005-812d-55a5-c655621b0f43, 'name': SearchDatastore_Task, 'duration_secs': 0.010563} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.391921] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1329.392145] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1329.392361] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1329.392491] env[63371]: INFO nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1329.394619] env[63371]: DEBUG oslo.service.loopingcall [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.394619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.394619] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1329.394619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.394970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.394970] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1329.394970] env[63371]: DEBUG nova.compute.manager [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1329.394970] env[63371]: DEBUG nova.network.neutron [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1329.396637] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65b98fd6-2a54-4723-9565-a67cdd1c53e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.406723] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1329.407029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1329.408102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cc1f6f4-7c5e-4158-b0f9-2af7f9c4dff6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.416877] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.416877] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554" [ 1329.416877] env[63371]: _type = "Task" [ 1329.416877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.426410] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.471329] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.516179] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1329.542933] env[63371]: DEBUG nova.scheduler.client.report [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1329.552118] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1329.552537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1329.553194] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1329.553665] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1329.554537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1329.554537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1329.555572] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1329.555942] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1329.557793] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1329.558121] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1329.558394] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1329.562874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46efe451-7712-4cfd-8dde-981c11434b7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.571924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d9a48-e019-4b77-8bb6-1303ac8be38f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.739733] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task, 'duration_secs': 0.528636} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.739993] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1329.745061] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b608e1f8-4119-4629-b6cf-b6f387ee59e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.753020] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1329.753020] env[63371]: value = "task-1773562" [ 1329.753020] env[63371]: _type = "Task" [ 1329.753020] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.764349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.764349] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance network_info: |[{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.764534] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773562, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.766320] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:c3:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17aee217-e9ac-4d12-8821-73130231a498', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.774272] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating folder: Project (c0f2fde472b14ab9a4d20947ca714191). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.774919] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e110ab0e-8b9a-4952-a84d-89aa7b3ee23b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.785069] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created folder: Project (c0f2fde472b14ab9a4d20947ca714191) in parent group-v368199. [ 1329.785319] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating folder: Instances. Parent ref: group-v368232. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.785829] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d2aca3-9cab-4d51-9a57-266178ed1a30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.798852] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created folder: Instances in parent group-v368232. [ 1329.799251] env[63371]: DEBUG oslo.service.loopingcall [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.800280] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.800280] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80cd7cd6-c892-4ec1-9cf3-74623d943b1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.822957] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.822957] env[63371]: value = "task-1773565" [ 1329.822957] env[63371]: _type = "Task" [ 1329.822957] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.835417] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.929305] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554, 'name': SearchDatastore_Task, 'duration_secs': 0.011607} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.930275] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bb5e92e-a435-46a1-b82c-aaeaa4b71038 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.936398] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.936398] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d" [ 1329.936398] env[63371]: _type = "Task" [ 1329.936398] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.949496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.949496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.954342] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.974586] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.974873] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance network_info: |[{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.975231] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:50:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e30245c5-78f5-48e6-b504-c6c21f5a9b45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '358a8d7d-459f-49a9-b3c7-0cf811dd7e54', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.987513] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating folder: Project (7a9101ae72864e0b8af6c598153ff40e). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.988265] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60425f07-40cb-4fea-9917-cda54c4695c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.999631] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created folder: Project (7a9101ae72864e0b8af6c598153ff40e) in parent group-v368199. [ 1329.999631] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating folder: Instances. Parent ref: group-v368235. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.999631] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dacf8ebe-09a9-4ec8-8e75-fb8a2fabe430 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.009295] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created folder: Instances in parent group-v368235. [ 1330.009534] env[63371]: DEBUG oslo.service.loopingcall [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1330.009730] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1330.009951] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d74a341-7cf0-4d73-a6b9-e5b18c2920d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.031892] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1330.031892] env[63371]: value = "task-1773568" [ 1330.031892] env[63371]: _type = "Task" [ 1330.031892] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.044529] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.068084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.068084] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1330.069679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.626s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.071791] env[63371]: INFO nova.compute.claims [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1330.177407] env[63371]: DEBUG nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.177407] env[63371]: DEBUG nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] No waiting events found dispatching network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1330.177739] env[63371]: WARNING nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received unexpected event network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 for instance with vm_state building and task_state spawning. [ 1330.262032] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773562, 'name': Rename_Task, 'duration_secs': 0.181971} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.262938] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1330.262938] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23bd1c64-a9fc-4497-8e5d-024d9487fefb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.270681] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1330.270681] env[63371]: value = "task-1773569" [ 1330.270681] env[63371]: _type = "Task" [ 1330.270681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.276666] env[63371]: DEBUG nova.network.neutron [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.280144] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.335065] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.453577] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d, 'name': SearchDatastore_Task, 'duration_secs': 0.032585} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.453983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.454388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1330.454752] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15d1acc4-057c-4794-9a5c-d862938ac562 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.463598] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1330.463598] env[63371]: value = "task-1773570" [ 1330.463598] env[63371]: _type = "Task" [ 1330.463598] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.475460] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.543789] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.577215] env[63371]: DEBUG nova.compute.utils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1330.582859] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1330.582859] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1330.661336] env[63371]: DEBUG nova.policy [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b22696705ee840cb8ecd18e5abcec19c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5c5bf80b8e64c8795da4d79d6a89150', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1330.787814] env[63371]: INFO nova.compute.manager [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 1.39 seconds to deallocate network for instance. [ 1330.788360] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.840298] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task, 'duration_secs': 0.522558} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.840298] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.840298] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38c8d36-8b57-46ab-8fc7-bf7f2afec7bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.846940] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1330.846940] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec" [ 1330.846940] env[63371]: _type = "Task" [ 1330.846940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.852951] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Successfully updated port: f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1330.859955] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.872439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.872656] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.978168] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.046411] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task, 'duration_secs': 0.529883} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.046672] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1331.047396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.082507] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1331.115307] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Successfully created port: 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1331.152983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.153425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.289980] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.299728] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.356959] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec, 'name': SearchDatastore_Task, 'duration_secs': 0.052465} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.360444] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.360653] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.360890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.361043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.361236] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.362533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.362533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.362533] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1331.363324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.366762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1331.366762] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab59758-f263-485f-a913-82c81fa5940a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.366762] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb3035f6-e13f-4430-b935-54612cb68766 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.371524] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1331.371524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff" [ 1331.371524] env[63371]: _type = "Task" [ 1331.371524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.384380] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.400881] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.401110] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.401934] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04c5ca38-7b1e-42fc-be3a-0ac18e195d00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.408633] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.408633] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523ee850-e65e-5726-a010-d290ed6293bd" [ 1331.408633] env[63371]: _type = "Task" [ 1331.408633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.421335] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ee850-e65e-5726-a010-d290ed6293bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010073} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.422206] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5001994c-4b0c-42e6-91aa-de80bc2e5fa0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.430610] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.430610] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061" [ 1331.430610] env[63371]: _type = "Task" [ 1331.430610] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.438744] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.461933] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1331.462167] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.462690] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.463171] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.463725] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] No waiting events found dispatching network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1331.463725] env[63371]: WARNING nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received unexpected event network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 for instance with vm_state building and task_state spawning. [ 1331.463882] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-changed-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1331.464061] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Refreshing instance network info cache due to event network-changed-d1b325d0-b864-44be-8fe4-b923489752d0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1331.464257] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.464393] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.464546] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Refreshing network info cache for port d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1331.485819] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614437} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.487615] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1331.487847] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1331.488662] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d85d73-9bed-497c-a8e4-a2ceb70e24e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.491960] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d42a6e34-612b-46ae-9923-ad706c010cf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.500456] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e6201f-350b-4a1e-9d4c-a06697750355 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.504409] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1331.504409] env[63371]: value = 
"task-1773572" [ 1331.504409] env[63371]: _type = "Task" [ 1331.504409] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.538254] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79e20c4-f4ef-4b09-937b-3410cf0f96b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.544431] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.549926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5044b0a-f713-4b03-b2c4-251643125a7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.565881] env[63371]: DEBUG nova.compute.provider_tree [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.770590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.770847] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.771466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.771466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.771983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock 
"fc0715a1-a056-4a1b-a86e-959680effc97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.774156] env[63371]: INFO nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Terminating instance [ 1331.776060] env[63371]: DEBUG nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1331.776181] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.781511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251b3359-6efc-4826-b65a-3bcaea98a381 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.787807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.791089] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13d6d1ec-2788-4943-bb00-ca932b5ebf6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.792869] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task, 'duration_secs': 1.147822} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.793280] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1331.793376] env[63371]: INFO nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 11.82 seconds to spawn the instance on the hypervisor. 
[ 1331.793556] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1331.794726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda0edb-2a9d-4718-a7d9-f04d0066a2d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.798595] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1331.798595] env[63371]: value = "task-1773573" [ 1331.798595] env[63371]: _type = "Task" [ 1331.798595] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.826187] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.885661] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff, 'name': SearchDatastore_Task, 'duration_secs': 0.036884} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.885971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.886232] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.886479] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.922189] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1331.942867] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061, 'name': SearchDatastore_Task, 'duration_secs': 0.033103} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.943187] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.943403] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.944392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.944392] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.944392] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df055bda-a9bb-4b03-8283-0de8983eff6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.946219] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c4e06cd-e038-4ecd-82b4-c778509e1c23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.954284] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.954284] env[63371]: value = "task-1773574" [ 1331.954284] env[63371]: _type = "Task" [ 1331.954284] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.958241] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.958494] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.959677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe36660-386d-41dc-8eca-0c4de2a57aad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.964957] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.967785] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1331.967785] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7" [ 1331.967785] env[63371]: _type = "Task" [ 1331.967785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.977339] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.020438] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096234} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.020700] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.021519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e5f86b-dbbb-483c-bd69-ce9c89c33560 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.049362] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.049714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ceac9cc-37c2-4926-bf4b-24775e9e4ba7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.070510] env[63371]: DEBUG nova.scheduler.client.report [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.075131] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1332.075131] env[63371]: value = "task-1773575" [ 1332.075131] env[63371]: _type = "Task" [ 1332.075131] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.090156] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.096740] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1332.125739] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:31:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='318065608',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-2139020529',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.126022] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.126022] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.126198] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.126291] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.126435] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.126646] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.126823] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 
tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.126991] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.127178] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.127377] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.128300] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef783c-d7d6-4cf8-9fcd-67e0cd6a6cef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.137817] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5299f083-fc71-4430-9621-5f66019145b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.238967] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.308715] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 
tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773573, 'name': PowerOffVM_Task, 'duration_secs': 0.387156} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.309404] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.309661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1332.309930] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e7b52e8-b70e-4f1c-9b33-353bc87b306b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.331429] env[63371]: INFO nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 28.63 seconds to build instance. [ 1332.421532] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1332.421816] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1332.422047] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1332.422358] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44c3755c-9b0d-4fec-adec-42c7fd6ede0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.436616] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1332.436616] env[63371]: value = "task-1773577" [ 1332.436616] env[63371]: _type = "Task" [ 1332.436616] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.446927] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.465538] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.482209] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7, 'name': SearchDatastore_Task, 'duration_secs': 0.04533} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.482933] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbbfe4f4-2a6b-43c5-890e-5aab1a3e7613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.489703] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1332.489703] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a" [ 1332.489703] env[63371]: _type = "Task" [ 1332.489703] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.503777] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.545988] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updated VIF entry in instance network info cache for port d1b325d0-b864-44be-8fe4-b923489752d0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1332.545988] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.582211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.582884] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1332.589125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.803s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.591067] env[63371]: DEBUG nova.objects.instance [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lazy-loading 'resources' on Instance uuid cffe6a79-ad7e-4488-b179-608a03c978aa {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.617253] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.742708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.743041] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance network_info: |[{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1332.744674] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:b5:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f65a228f-d220-4478-a274-65cee7a3df3c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1332.754192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating folder: Project (1f0a4db7d709461ca32a5dc0ebabdf31). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1332.754533] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51376af7-09e0-44d8-88cb-20dd6450f890 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.770095] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created folder: Project (1f0a4db7d709461ca32a5dc0ebabdf31) in parent group-v368199. [ 1332.770095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating folder: Instances. Parent ref: group-v368238. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1332.771475] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93c664fa-4e31-41b9-811d-5835f3049e55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.779479] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created folder: Instances in parent group-v368238. [ 1332.779728] env[63371]: DEBUG oslo.service.loopingcall [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.780070] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1332.780187] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adc8093c-50df-4075-aeb0-41628e373a92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.806130] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1332.806130] env[63371]: value = "task-1773580" [ 1332.806130] env[63371]: _type = "Task" [ 1332.806130] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.819544] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.836497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.141s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.946363] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.976321] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79691} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.976791] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Successfully updated port: 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1332.978096] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.978262] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.979742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac8ebbe3-ea31-400f-8ca1-c3a757f02c34 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.986395] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1332.986395] env[63371]: value = "task-1773581" [ 1332.986395] env[63371]: _type = "Task" [ 1332.986395] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.004554] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.011216] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a, 'name': SearchDatastore_Task, 'duration_secs': 0.068536} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.011498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.011778] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1333.012327] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09ccac86-c750-4b25-b23b-ab4237896b78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.020747] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1333.020747] env[63371]: value = "task-1773582" [ 1333.020747] env[63371]: _type = "Task" [ 1333.020747] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.033179] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.050493] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.050493] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.050493] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.050778] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.050814] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.051033] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] No waiting events found dispatching network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1333.051147] env[63371]: WARNING nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received unexpected event network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 for instance with vm_state building and task_state spawning. 
[ 1333.051326] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-vif-deleted-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.051514] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-changed-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.051771] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Refreshing instance network info cache due to event network-changed-17aee217-e9ac-4d12-8821-73130231a498. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1333.051894] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.051973] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.052519] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Refreshing network info cache for port 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.090747] env[63371]: DEBUG nova.compute.utils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1333.096811] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1333.103716] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task, 'duration_secs': 0.658377} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.103932] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1333.104678] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97579428-e793-46a3-a036-6fba503a071b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.115459] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1333.115459] env[63371]: value = "task-1773583" [ 1333.115459] env[63371]: _type = "Task" [ 1333.115459] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.128687] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773583, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.307827] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.308109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.308330] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.308917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.308917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.312962] env[63371]: INFO nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Terminating instance [ 1333.321096] env[63371]: DEBUG nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1333.321424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.325907] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21a4467-aec0-425e-a4d2-c6c817205dcd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.337019] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.338849] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1333.341560] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.342032] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-618b4f58-c162-463f-a8de-ebd6a15e04a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.361031] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1333.361031] env[63371]: value = "task-1773584" [ 1333.361031] env[63371]: _type = "Task" [ 1333.361031] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.377645] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.449863] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.541654} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.452699] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.452922] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.453150] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.453364] env[63371]: INFO nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1333.453852] env[63371]: DEBUG oslo.service.loopingcall [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.453998] env[63371]: DEBUG nova.compute.manager [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1333.454108] env[63371]: DEBUG nova.network.neutron [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1333.484010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.484010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.484010] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1333.510586] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154189} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.515252] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.518678] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7270b58-1f20-4b15-abf3-b75dc1d0bb98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.543373] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.545858] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c362ad2-69f6-48b9-bac2-bcd36d197734 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.565213] env[63371]: DEBUG nova.compute.manager [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-changed-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.565213] env[63371]: DEBUG nova.compute.manager [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Refreshing instance network info cache due to event network-changed-358a8d7d-459f-49a9-b3c7-0cf811dd7e54. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1333.565506] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Acquiring lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.565660] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Acquired lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.565915] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Refreshing network info cache for port 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.578697] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.589226] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1333.589226] env[63371]: value = "task-1773585" [ 1333.589226] env[63371]: _type = "Task" [ 1333.589226] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.600418] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1333.609517] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.634500] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773583, 'name': Rename_Task, 'duration_secs': 0.407442} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.635016] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1333.635212] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b474de6-5783-4103-b61b-10f0c7b6f989 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.644878] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1333.644878] env[63371]: value = "task-1773586" [ 1333.644878] env[63371]: _type = "Task" [ 1333.644878] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.659464] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.684880] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02993a8f-1a75-4fc5-b6c7-13ac3cd5de3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.705025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25838425-092e-469b-81f5-346ba8b49089 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.743919] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e39ef8-26c4-4e46-9287-0f61b44fee4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.753634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef12de85-e17d-4e31-8fa5-e3732cfa6fb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.773565] env[63371]: DEBUG nova.compute.provider_tree [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.823475] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.873953] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.876417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.033993] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.88023} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.034329] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1334.034643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1334.035162] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eea36f8a-a4f6-4efd-9639-edcfaf892111 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.045102] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1334.045102] env[63371]: value = "task-1773587" [ 1334.045102] env[63371]: _type = "Task" [ 1334.045102] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.055792] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.087454] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1334.105605] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.106095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.106499] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.117510] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updated VIF entry in instance network info cache for port 17aee217-e9ac-4d12-8821-73130231a498. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1334.118274] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.161178] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.283299] env[63371]: DEBUG nova.scheduler.client.report [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.324153] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.372665] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.562276] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.358388} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.562594] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1334.564081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bba49b-582c-4f4c-9b08-20bfd2e541a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.599310] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1334.600961] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22681644-cce8-40c4-9c71-5bdce0c1dbf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.623022] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] 
[instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1334.625994] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.631039] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1334.631039] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1334.631039] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.631039] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.631039] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.638371] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task, 'duration_secs': 0.768248} completed successfully. 
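Editor's note: the inventory payload logged above for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 follows the usual Placement convention, where the allocatable amount per resource class is (total - reserved) * allocation_ratio. A minimal arithmetic sketch over that exact data, trimmed to the fields used here (pure Python, illustrative only):

# Inventory as reported in the log, keeping only total/reserved/allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Placement treats (total - reserved) * allocation_ratio as the amount
    # that can actually be allocated for each resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
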
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.641209] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.642621] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1334.642621] env[63371]: value = "task-1773588" [ 1334.642621] env[63371]: _type = "Task" [ 1334.642621] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.642796] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55145a34-b7a2-4154-9344-696e3ef3c4e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.670809] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task, 'duration_secs': 1.018626} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.671108] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.673685] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1334.674383] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.674523] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1334.674729] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1334.674883] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1334.675058] env[63371]: DEBUG nova.virt.hardware [None 
req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1334.675247] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1334.675419] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1334.676867] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1334.677131] env[63371]: INFO nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 12.70 seconds to spawn the instance on the hypervisor. [ 1334.677321] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1334.678355] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ddf95c-797c-4a03-af2b-df73b8a1534d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.682826] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1334.682826] env[63371]: value = "task-1773589" [ 1334.682826] env[63371]: _type = "Task" [ 1334.682826] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.684052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b37bb00-ab77-4dd7-b4a4-a7cea52fc37b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.696252] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a887f1f-c90f-4aaf-bb64-766ad7a62d73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.709052] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773589, 'name': Rename_Task} progress is 14%. 
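Editor's note: the nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: 1 vCPU with no flavor or image limits, so the only candidate is 1 socket x 1 core x 1 thread, which is what the log reports. The brute-force enumeration below is not Nova's actual implementation, just a small illustration of the same idea:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) combinations whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] matches the single topology in the log
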
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.727130] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1334.738615] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating folder: Project (8cce5d6d9b0e401caf02074dc66c16a6). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1334.740219] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8737b8a5-3cc1-478d-80a3-40be5b154d49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.753770] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created folder: Project (8cce5d6d9b0e401caf02074dc66c16a6) in parent group-v368199. [ 1334.753979] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating folder: Instances. Parent ref: group-v368241. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1334.754233] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf5e9975-1802-4190-9c2b-315150f04c05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.773624] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created folder: Instances in parent group-v368241. [ 1334.773869] env[63371]: DEBUG oslo.service.loopingcall [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1334.774258] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1334.774362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47adeb32-602b-4c34-99dd-ea358ad09286 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.789344] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.793237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.656s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.793919] env[63371]: INFO nova.compute.claims [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.798423] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.798423] env[63371]: value = "task-1773592" [ 1334.798423] env[63371]: _type = "Task" [ 1334.798423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.815302] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.821519] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.826022] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task, 'duration_secs': 2.004616} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.826487] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1334.827255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.827446] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.828040] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1334.828040] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d069a988-6a22-4e0e-8bbe-e2af6ff8cda9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.830841] env[63371]: INFO nova.scheduler.client.report [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleted allocations for instance cffe6a79-ad7e-4488-b179-608a03c978aa [ 1334.839289] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1334.839289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239" [ 1334.839289] env[63371]: _type = "Task" [ 1334.839289] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.848673] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.872564] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task, 'duration_secs': 1.208953} completed successfully. 
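Editor's note: several requests in this section take a named lock (plus an "external semaphore") on the devstack-image-cache_base path before touching the cached VMDK, so concurrent spawns do not race on the same image. A minimal sketch of that pattern with oslo.concurrency; this is illustrative and not the Nova code itself, and fetch_image_if_missing is a placeholder name:

from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9"

def fetch_image_if_missing():
    # lockutils.lock() is a context manager: the "Acquiring"/"Acquired"/"Releasing"
    # entries in the log correspond to entering and leaving a block like this.
    with lockutils.lock(CACHE_LOCK):
        # ... check the datastore cache and copy the image only if it is absent ...
        pass
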
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.872831] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.872994] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.873253] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22984b90-12f7-4331-9ef8-874aeee625ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.972014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1334.972014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1334.972160] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.972428] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d1fc5b2-60b5-4dd1-be64-ca831c58ebd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.981542] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1334.981542] env[63371]: value = "task-1773594" [ 1334.981542] env[63371]: _type = "Task" [ 1334.981542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.990825] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773594, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.011091] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updated VIF entry in instance network info cache for port 358a8d7d-459f-49a9-b3c7-0cf811dd7e54. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.011476] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.161478] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.220545] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773589, 'name': Rename_Task, 'duration_secs': 0.267444} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.220810] env[63371]: DEBUG nova.network.neutron [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.221793] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1335.222169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a706364-6c05-47d0-b231-7141bf00a9fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.227706] env[63371]: INFO nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 30.00 seconds to build instance. [ 1335.236070] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1335.236070] env[63371]: value = "task-1773595" [ 1335.236070] env[63371]: _type = "Task" [ 1335.236070] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.248103] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.318482] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task} progress is 99%. 
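Editor's note: most of the traffic here is oslo_vmware task polling: wait_for_task submits the request, then _poll_task logs "progress is N%" until the task succeeds (the CreateVM_Task above goes 0% -> 99% -> completed in about 2 seconds). The loop below is a simplified, hypothetical model of that behaviour, not the oslo.vmware implementation (which lives in oslo_vmware/api.py per the paths in the log); get_task_info and its fields are assumptions for illustration:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it finishes, mirroring the log's progress lines."""
    while True:
        info = get_task_info()                 # stand-in for a TaskInfo-like object
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError(info.error)
        print(f"Task {info.key} progress is {info.progress or 0}%")
        time.sleep(poll_interval)
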
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.327703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.328177] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance network_info: |[{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1335.328466] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:c5:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1846a8cd-46dc-4187-af60-d4e4eee750dc', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.340199] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating folder: Project (b5c5bf80b8e64c8795da4d79d6a89150). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.343616] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28567f13-d83a-456e-b022-cc9eacf692d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.347065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.699s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.361325] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239, 'name': SearchDatastore_Task, 'duration_secs': 0.030935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.361648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.361888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.362149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.362312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.362495] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.363196] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-973d886e-d03f-45cb-a3cd-200a19b244c0 
{{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.367421] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created folder: Project (b5c5bf80b8e64c8795da4d79d6a89150) in parent group-v368199. [ 1335.367664] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating folder: Instances. Parent ref: group-v368244. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.368333] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44b3d297-e3f4-4455-ae97-e75b5e1a04b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.383934] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created folder: Instances in parent group-v368244. [ 1335.384163] env[63371]: DEBUG oslo.service.loopingcall [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.384361] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.384571] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4241be9-61e2-4db8-94d1-84194f1f11e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.404085] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.404292] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1335.405457] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14dce815-e752-45b3-9e4a-c33627332131 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.411846] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.411846] env[63371]: value = "task-1773598" [ 1335.411846] env[63371]: _type = "Task" [ 1335.411846] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.416051] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1335.416051] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1" [ 1335.416051] env[63371]: _type = "Task" [ 1335.416051] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.422755] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773598, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.428568] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.497723] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.403639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.497987] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.498218] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.498428] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.498602] env[63371]: INFO nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1335.498841] env[63371]: DEBUG oslo.service.loopingcall [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.499181] env[63371]: DEBUG nova.compute.manager [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1335.499296] env[63371]: DEBUG nova.network.neutron [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.514514] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Releasing lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.657980] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.658389] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.662883] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task, 'duration_secs': 0.659622} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.665744] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1335.666389] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8510e9ad-bcdd-4a67-b139-bf631608bfad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.675634] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1335.675634] env[63371]: value = "task-1773599" [ 1335.675634] env[63371]: _type = "Task" [ 1335.675634] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.690785] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773599, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.725881] env[63371]: INFO nova.compute.manager [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 2.27 seconds to deallocate network for instance. [ 1335.736662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.514s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.750060] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.778567] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1335.778912] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.779266] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.779445] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.779655] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] No waiting events found dispatching network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1335.779775] env[63371]: WARNING nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received unexpected event network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c for instance with vm_state building and task_state spawning. [ 1335.779930] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1335.780090] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing instance network info cache due to event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1335.780269] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.780395] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.780541] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1335.820588] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task, 'duration_secs': 0.593337} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.820934] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.821660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.821990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.822554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1335.823000] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e2f86e-d4a2-4923-a34c-607f18fc47f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.834347] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1335.834347] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d" [ 1335.834347] env[63371]: _type = "Task" [ 1335.834347] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.847176] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.930469] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773598, 'name': CreateVM_Task, 'duration_secs': 0.498932} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.934075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.934537] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.017079} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.935238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.936064] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-325c4391-24a0-4b87-943e-d30747f2910b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.945676] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1335.945676] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4" [ 1335.945676] env[63371]: _type = "Task" [ 1335.945676] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.956840] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.968034] env[63371]: DEBUG nova.compute.manager [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1336.166249] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.166673] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-vif-deleted-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.187829] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773599, 'name': Rename_Task, 'duration_secs': 0.207209} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.187829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1336.187977] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1361f27-2815-4470-a19f-2ea49bc437f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.195765] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1336.195765] env[63371]: value = "task-1773600" [ 1336.195765] env[63371]: _type = "Task" [ 1336.195765] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.204823] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.235992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.236403] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1336.259706] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task, 'duration_secs': 0.704523} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.260445] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1336.261581] env[63371]: INFO nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 11.76 seconds to spawn the instance on the hypervisor. 
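The PowerOnVM_Task and Rename_Task entries above all follow the same oslo_vmware pattern: a vCenter task is submitted, then polled, with "progress is N%" logged on each poll until _poll_task reports it completed successfully or raises on failure; the repeated progress lines for the same task id in this log come from that loop running on a fixed interval. Below is a minimal, self-contained sketch of that polling pattern; fetch_task_info and the TaskInfo type are illustrative stand-ins for the vCenter query, not oslo_vmware's actual API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # "running", "success", or "error"
        progress: int = 0       # corresponds to the "progress is N%" log lines
        error: str | None = None

    def fetch_task_info(task_id: str) -> TaskInfo:
        # Placeholder for the vCenter query a real driver would issue here.
        return TaskInfo(state="success", progress=100)

    def wait_for_task(task_id: str, interval: float = 0.5) -> TaskInfo:
        """Poll a long-running task until it succeeds or fails."""
        while True:
            info = fetch_task_info(task_id)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"{task_id} failed: {info.error}")
            # In the log this shows up as repeated "progress is N%" entries.
            time.sleep(interval)

    if __name__ == "__main__":
        print(wait_for_task("task-1773599").progress)   # -> 100
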
[ 1336.261581] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1336.262745] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493ca6bd-1418-43f6-825a-17786e9a990b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.280155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5566dace-d588-4d81-84ef-b89b2a60886f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.293251] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d95fabb-134c-416a-b297-c81b6603b8a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.335033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35a0808-84a8-4887-ac46-98dd0bb72511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.353928] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d, 'name': SearchDatastore_Task, 'duration_secs': 0.018081} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.354216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.354395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.355721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2edf8f-e601-483d-b776-f36e9d850cd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.360646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.360646] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.360646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.361369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.361727] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1336.362337] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-85804f55-911c-4034-8ac6-d19eb77ef5ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.374644] env[63371]: DEBUG nova.compute.provider_tree [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.380018] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1336.380018] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f" [ 1336.380018] env[63371]: _type = "Task" [ 1336.380018] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.393641] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.458603] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4, 'name': SearchDatastore_Task, 'duration_secs': 0.021962} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.458958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.459165] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1336.459482] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.459672] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.459904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dae5235b-38cf-4532-894b-190b24dea388 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.461947] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1265596e-b533-4edf-a0ae-396ff0d33f76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.474452] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1336.474452] env[63371]: value = "task-1773601" [ 1336.474452] env[63371]: _type = "Task" [ 1336.474452] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.478794] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.479085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1336.480113] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e47905c-038c-48a9-9b50-c2337ccbc80d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.485709] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.486929] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.488953] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1336.488953] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae" [ 1336.488953] env[63371]: _type = "Task" [ 1336.488953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.498509] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.716497] env[63371]: DEBUG nova.network.neutron [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.721092] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.776271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.802577] env[63371]: INFO nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 18.91 seconds to build instance. 
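The instance_info_cache updates above carry the full network_info model for each VIF: a list of port entries, each with a network containing subnets, fixed ips, and any attached floating_ips (for example port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 with fixed address 192.168.128.13 and floating address 10.180.180.158). The sketch below walks that structure to collect addresses; extract_addresses is a hypothetical helper written against the shape shown in these cache updates, not Nova's NetworkInfo class.

    def extract_addresses(network_info):
        """Collect fixed and floating addresses from a network_info-shaped list."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(fip["address"]
                                    for fip in ip.get("floating_ips", []))
        return fixed, floating

    if __name__ == "__main__":
        # Trimmed-down entry mirroring the cache update logged above.
        sample = [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3",
                   "network": {"subnets": [{"ips": [
                       {"address": "192.168.128.13",
                        "floating_ips": [{"address": "10.180.180.158"}]}]}]}}]
        print(extract_addresses(sample))  # (['192.168.128.13'], ['10.180.180.158'])
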
[ 1336.847132] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updated VIF entry in instance network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1336.848123] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.882968] env[63371]: DEBUG nova.scheduler.client.report [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.558671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.563354] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1337.565871] env[63371]: INFO nova.compute.manager [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 2.07 seconds to deallocate network for instance. [ 1337.566365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.196s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.566673] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.566909] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1337.567139] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing instance network info cache due to event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1337.567383] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.567524] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.570450] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1337.572496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.156s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.572496] env[63371]: DEBUG nova.objects.instance [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lazy-loading 'resources' on Instance uuid 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 {{(pid=63371) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.589786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.589786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.597749] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f, 'name': SearchDatastore_Task, 'duration_secs': 0.02656} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.607879] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.608178] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.608426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.609069] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task, 'duration_secs': 1.047591} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.609753] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae, 'name': SearchDatastore_Task, 'duration_secs': 0.017294} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.609973] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690956} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.610275] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1337.610508] env[63371]: INFO nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 10.54 seconds to spawn the instance on the hypervisor. [ 1337.610712] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1337.611823] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1337.611963] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1337.612952] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8de5e0-9ac6-42c2-8a8a-eeda75162502 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.616262] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b5971a6-ac88-4eb7-8fb2-820543bb2863 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1337.618457] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7bc5eae-bc5b-46bc-9ce7-a0a3f9775fe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.632096] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1337.632096] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527e0f0d-ce3c-1447-19c0-4ab34b789e73" [ 1337.632096] env[63371]: _type = "Task" [ 1337.632096] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.632324] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1337.632324] env[63371]: value = "task-1773602" [ 1337.632324] env[63371]: _type = "Task" [ 1337.632324] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.645851] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.650715] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527e0f0d-ce3c-1447-19c0-4ab34b789e73, 'name': SearchDatastore_Task, 'duration_secs': 0.01632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.650947] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.651114] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1337.651398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.651666] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.651931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b3c1869-075a-4b67-9b6f-9de0cb2ac936 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.654765] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d45fb70-8aa3-466c-98db-2326d14a7a95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.665375] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1337.665375] env[63371]: value = "task-1773603" [ 1337.665375] env[63371]: _type = "Task" [ 1337.665375] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.666914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.667044] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.673165] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28a5aed9-36a6-48e9-afbf-b111f99535ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.682208] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.682498] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1337.682498] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842" [ 1337.682498] env[63371]: _type = "Task" [ 1337.682498] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.695728] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.708789] env[63371]: DEBUG nova.compute.manager [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-changed-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1337.708789] env[63371]: DEBUG nova.compute.manager [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing instance network info cache due to event network-changed-a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1337.708789] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.708789] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.708789] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing network info cache for port a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.079357] env[63371]: DEBUG nova.compute.utils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.081371] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1338.081627] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.091538] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1338.095671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.156732] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103003} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.157335] env[63371]: INFO nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 18.09 seconds to build instance. [ 1338.160942] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1338.162942] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbcc126-1c12-468b-9504-6c7adea8a311 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.192081] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1338.198112] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e414c804-0ca3-46ed-9734-ac2008fbb688 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.223541] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.230572] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842, 'name': SearchDatastore_Task, 'duration_secs': 0.015052} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.233602] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1338.233602] env[63371]: value = "task-1773604" [ 1338.233602] env[63371]: _type = "Task" [ 1338.233602] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.233602] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed4c73f9-3a5c-4b96-9d1b-4fcd57182d12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.253684] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1338.253684] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d" [ 1338.253684] env[63371]: _type = "Task" [ 1338.253684] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.253684] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773604, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.269220] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.356452] env[63371]: DEBUG nova.policy [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42d2f2710cc949ad9ffb24b9474bd8b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '014cf08938b14b169e45f01c87f33d23', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1338.586036] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1338.622162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.663813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.605s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.686157] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.958554} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.687985] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1338.687985] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1338.695736] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b72a80e0-5e32-4ed2-bfa6-337e2f71bcb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.704532] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1338.704532] env[63371]: value = "task-1773605" [ 1338.704532] env[63371]: _type = "Task" [ 1338.704532] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.720975] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773605, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.732814] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c4fc9f-1095-48d6-b3d3-95054e81b370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.762501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15df00ea-1b51-46d9-9895-ab74db7e4e18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.766193] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773604, 'name': ReconfigVM_Task, 'duration_secs': 0.509226} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.766483] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to attach disk [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1338.768294] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6fba06b-f62b-4306-9ce5-3ad395fae0d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.773545] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d, 'name': SearchDatastore_Task, 'duration_secs': 0.075868} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.800257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.800902] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.802068] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1338.802068] env[63371]: value = "task-1773606" [ 1338.802068] env[63371]: _type = "Task" [ 1338.802068] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.802479] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12a164b0-709d-4eb1-aaac-1c5ad7f4de2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.805404] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9ac44a-75f5-4fe1-92de-efed7c68992b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.821449] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a282e5c-73ce-4b93-8f0a-3296da193096 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.826033] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1338.826033] env[63371]: value = "task-1773607" [ 1338.826033] env[63371]: _type = "Task" [ 1338.826033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.826414] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773606, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.840545] env[63371]: DEBUG nova.compute.provider_tree [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.848168] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.003394] env[63371]: DEBUG nova.compute.manager [req-ea0f4026-3a5c-4ae4-8ac7-48d1a69be6a8 req-73fde250-cef4-4221-8704-d173aa887840 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-vif-deleted-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1339.003609] env[63371]: DEBUG nova.compute.manager [req-ea0f4026-3a5c-4ae4-8ac7-48d1a69be6a8 req-73fde250-cef4-4221-8704-d173aa887840 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-vif-deleted-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1339.167943] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1339.202812] env[63371]: INFO nova.compute.manager [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Rescuing [ 1339.203323] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.203526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.203695] env[63371]: DEBUG nova.network.neutron [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1339.219546] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131139} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.220203] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.222133] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d08bbcf-08e6-4ca2-90eb-4e87e24728b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.248284] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.248982] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c21f0bc9-e003-422b-bf22-ff6a239edd6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.276734] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1339.276734] env[63371]: value = "task-1773608" [ 
1339.276734] env[63371]: _type = "Task" [ 1339.276734] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.297106] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.316237] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773606, 'name': Rename_Task, 'duration_secs': 0.16666} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.316879] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.316879] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fd7f6de-af51-4051-b01f-71612db059e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.326881] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1339.326881] env[63371]: value = "task-1773609" [ 1339.326881] env[63371]: _type = "Task" [ 1339.326881] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.340368] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.344714] env[63371]: DEBUG nova.scheduler.client.report [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1339.348182] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.601539] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1339.640804] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.640804] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.641279] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.642081] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.642391] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.644805] 
env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.645357] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.645357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d374169-16e3-4800-bf82-e8eb41a99ce9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.656445] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff377230-892c-4ddb-b452-c8fe4d5c69cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.681190] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated VIF entry in instance network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.681190] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.712317] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.789415] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.808147] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updated VIF entry in instance network info cache for port a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.809109] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.841779] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77461} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.845067] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.845296] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.845873] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.846098] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd551962-da37-46bb-9354-bf1a49c5aeeb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.850718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.279s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.853609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.255s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.855993] env[63371]: INFO nova.compute.claims [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1339.859935] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1339.859935] env[63371]: value = "task-1773610" [ 1339.859935] env[63371]: _type = "Task" [ 1339.859935] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.870818] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773610, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.879058] env[63371]: INFO nova.scheduler.client.report [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleted allocations for instance 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 [ 1340.188404] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.189121] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1340.189121] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.189121] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.189469] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.189469] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] No waiting events found dispatching network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1340.192211] env[63371]: WARNING nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received unexpected event network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc for instance with vm_state building and task_state spawning. 
[ 1340.192211] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1340.192211] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing instance network info cache due to event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1340.192211] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.192211] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.193151] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1340.223541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.223819] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.294617] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.313460] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Successfully created port: 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.317416] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.348053] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task, 'duration_secs': 0.642935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.348446] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.348673] env[63371]: INFO nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1340.348868] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1340.350836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98d772a-882b-4086-8560-eaf8a897fd5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.377837] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167357} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.378200] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1340.379126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f398a5-af44-4d0e-8964-5a1a606c4348 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.408328] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.409970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.823s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.411987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acc11d21-1734-49f4-a1dd-05145822c7db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.439268] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1340.439268] env[63371]: value = "task-1773611" [ 1340.439268] env[63371]: _type = "Task" [ 1340.439268] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.450799] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773611, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.480079] env[63371]: DEBUG nova.network.neutron [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.799732] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.878938] env[63371]: INFO nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 20.46 seconds to build instance. 
[ 1340.943467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.943467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.955686] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773611, 'name': ReconfigVM_Task, 'duration_secs': 0.466784} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.958778] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Reconfigured VM instance instance-0000000e to attach disk [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.959759] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a79980e-d677-4369-bfa7-6032df649127 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.967855] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1340.967855] env[63371]: value = "task-1773612" [ 1340.967855] env[63371]: _type = "Task" [ 1340.967855] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.982795] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773612, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.983298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.297169] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task, 'duration_secs': 1.852063} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.297169] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Reconfigured VM instance instance-0000000f to attach disk [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.297169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73cf9181-4349-4b2a-b66f-18674a232b81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.305575] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1341.305575] env[63371]: value = "task-1773613" [ 1341.305575] env[63371]: _type = "Task" [ 1341.305575] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.321310] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773613, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.352034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b1a0ed-9697-4c7b-a793-f973ef8043b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.360492] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1310c9-6700-4b19-9f93-e3d70302479a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.398050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.641s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.400307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b7da57-fbce-4627-849e-ad2b22f77013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.409593] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9caf16f8-fe8b-4322-bf11-4e04f88a6f30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.430019] env[63371]: DEBUG nova.compute.provider_tree [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.450231] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updated VIF entry in instance network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1341.450802] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.479822] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773612, 'name': Rename_Task, 'duration_secs': 0.392224} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.480075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.480343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb1f1cec-d3ac-4be7-a8f3-d49912feecf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.487802] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1341.487802] env[63371]: value = "task-1773614" [ 1341.487802] env[63371]: _type = "Task" [ 1341.487802] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.498666] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.527549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1341.527863] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a6c91af-5c91-4b25-9007-9a8c220a0515 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.539726] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1341.539726] env[63371]: value = "task-1773615" [ 1341.539726] env[63371]: _type = "Task" [ 1341.539726] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.555079] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.627090] env[63371]: DEBUG nova.compute.manager [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1341.628116] env[63371]: DEBUG nova.compute.manager [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1341.628116] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.628537] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.628537] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.818714] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773613, 'name': Rename_Task, 'duration_secs': 0.293967} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.819077] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.819386] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d8d2f1a-9260-47fd-8dcb-15add03b5a2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.830187] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1341.830187] env[63371]: value = "task-1773616" [ 1341.830187] env[63371]: _type = "Task" [ 1341.830187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.839953] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.906195] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Starting instance... 
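The Acquiring / Acquired / (later) Releasing lock "refresh_cache-a43fed87-..." lines are oslo.concurrency's lockutils at work: the network-changed event handler serializes refreshes of that instance's network info cache so concurrent events do not race. A rough sketch of the pattern, with refresh_network_info standing in for the Neutron query (an assumed helper, not Nova's actual signature):

from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid, refresh_network_info):
    # Lock name mirrors the "refresh_cache-<instance uuid>" strings in the log;
    # only one worker rebuilds a given instance's cache at a time.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return refresh_network_info(instance_uuid)
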
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1341.937771] env[63371]: DEBUG nova.scheduler.client.report [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1341.954308] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.000625] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.051871] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773615, 'name': PowerOffVM_Task, 'duration_secs': 0.237161} completed successfully. 
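The inventory dictionary reported for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 above is what Placement schedules against; the usable amount per resource class is (total - reserved) * allocation_ratio. A quick worked sketch using the exact figures from the log:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable(inv):
    # Effective capacity per resource class, before subtracting current allocations.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
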
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.052833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.053684] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d12b4a-4e35-4e3d-92f3-90d8253fb7c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.081721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75407826-178c-41ae-8286-96c25b75a137 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.112310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1342.112587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a43079c4-18a4-4053-a26b-0c0f540f5b9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.125810] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.125810] env[63371]: value = "task-1773617" [ 1342.125810] env[63371]: _type = "Task" [ 1342.125810] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.139269] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1342.140474] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.140474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.140474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.140859] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.141093] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87352aa9-d945-4fcd-8e8f-de493ffeced5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.152090] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.152418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
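The second power-off above is answered with "VM already powered off" rather than an error: vm_util treats an instance that is already off as a no-op, typically by tolerating the invalid-power-state fault vCenter returns. A tiny illustrative guard along those lines (the exception type and callable are placeholders, not the driver's real names):

class InvalidPowerStateError(Exception):
    """Placeholder for the fault vCenter raises when the VM is already off."""

def power_off_idempotent(invoke_power_off, vm_ref):
    # Treat "already powered off" as success, matching the DEBUG line above.
    try:
        invoke_power_off(vm_ref)                 # PowerOffVM_Task
    except InvalidPowerStateError:
        pass                                     # raced or already off; nothing to do
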
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.153134] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a8ac95b-a33e-4726-ab95-93ef2d20a852 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.159804] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.159804] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21" [ 1342.159804] env[63371]: _type = "Task" [ 1342.159804] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.171055] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.346999] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773616, 'name': PowerOnVM_Task, 'duration_secs': 0.474504} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.346999] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.347231] env[63371]: INFO nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 7.72 seconds to spawn the instance on the hypervisor. 
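The Processing image / Acquiring lock "[datastore1] devstack-image-cache_base/..." / SearchDatastore_Task sequence above is the image-cache check during spawn and rescue: a per-image lock is taken so concurrent builds share one copy, the datastore is searched for the cached vmdk, and only if it is missing does the driver fetch it from Glance. A rough sketch of that check, with search_datastore and fetch_from_glance as assumed stand-ins:

from oslo_concurrency import lockutils

def fetch_image_if_missing(image_id, cache_path, search_datastore, fetch_from_glance):
    # Lock name mirrors the "[datastore1] devstack-image-cache_base/<id>.vmdk"
    # lock strings in the log, so parallel spawns share one download.
    with lockutils.lock(cache_path):
        if not search_datastore(cache_path):     # SearchDatastore_Task
            fetch_from_glance(image_id, cache_path)
        return cache_path
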
[ 1342.348075] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1342.348273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7b048e-75e8-42ff-87fd-1fc179344ce0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.449221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.450047] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1342.456878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.456878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.161s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.456878] env[63371]: INFO nova.compute.claims [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.502681] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task, 'duration_secs': 0.723405} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.504649] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.505232] env[63371]: INFO nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 10.41 seconds to spawn the instance on the hypervisor. [ 1342.505232] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1342.506425] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b2d113-7986-4282-8782-1fe19cc79bf3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.677881] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21, 'name': SearchDatastore_Task, 'duration_secs': 0.012641} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.678857] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43878fa4-c8a9-4967-a3be-33d8067e7fd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.690787] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.690787] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed" [ 1342.690787] env[63371]: _type = "Task" [ 1342.690787] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.702463] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.837988] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1342.838452] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.874873] env[63371]: INFO nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 20.46 seconds to build instance. [ 1342.961847] env[63371]: DEBUG nova.compute.utils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1342.966260] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1343.042904] env[63371]: INFO nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 20.68 seconds to build instance. 
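The Updating instance_info_cache entries above and at the top of this section embed the full network_info model for a port. A small helper like the following (illustrative only, keys taken from the structure shown in the log) summarizes one VIF entry by pulling out its MAC, fixed IPv4 addresses and MTU:

def summarize_vif(vif):
    # Keys follow the network_info JSON logged by update_instance_cache_with_nw_info.
    ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    return vif["address"], ips, vif["network"]["meta"]["mtu"]

# For the VIF logged above for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 this
# yields ('fa:16:3e:52:9a:b1', ['192.168.128.13'], 8950).
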
[ 1343.086770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.087393] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.210453] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed, 'name': SearchDatastore_Task, 'duration_secs': 0.012436} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.210878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.212031] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1343.212031] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce2c71c2-9a21-4d75-8e37-9ca271dfc64a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.223187] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1343.223187] env[63371]: value = "task-1773618" [ 1343.223187] env[63371]: _type = "Task" [ 1343.223187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.235773] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.342121] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.377145] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.341s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.465085] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1343.545116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.838s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.632269] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Successfully updated port: 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.736247] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.881407] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.027268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5c4e90-1822-446f-89b8-225d0bd6d3d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.036641] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d9dd46-97cd-4d81-9bbd-acdc81127c26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.077663] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.081647] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf1170d-ca62-4a45-b446-9f2ac291d98c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.094081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2fbdf2-c66b-48c4-a442-9d67af5ead2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.109882] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1344.135814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.135814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.136086] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.237563] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.841086} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.238054] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1344.240025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2b5683-3cf8-4a95-92f0-f6d99843b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.269867] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1344.270636] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-687cc8a9-61e2-4ac3-8bd6-df71dc23e585 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.295060] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1344.295060] env[63371]: value = "task-1773619" [ 1344.295060] env[63371]: _type = "Task" [ 1344.295060] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.307593] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task} progress is 6%. 
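Taken together, the ServerRescueTestJSON entries trace the rescue path on this driver: power the instance off, copy the cached image vmdk into the instance directory as <image-id>-rescue.vmdk, reconfigure the VM to attach that disk thin-provisioned, and finally power it back on. A condensed outline of that sequence, with the four callables standing in for the vm_util/ds_util/volumeops helpers (assumed names, not the driver's real signatures):

def rescue_instance(power_off, copy_disk, attach_disk, power_on,
                    vm_ref, cache_vmdk, rescue_vmdk):
    # Order mirrors tasks 1773615 -> 1773618 -> 1773619 -> 1773625 in this log.
    power_off(vm_ref)                                     # PowerOffVM_Task
    copy_disk(src=cache_vmdk, dst=rescue_vmdk)            # CopyVirtualDisk_Task
    attach_disk(vm_ref, rescue_vmdk, disk_type="thin")    # ReconfigVM_Task
    power_on(vm_ref)                                      # PowerOnVM_Task
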
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.321262] env[63371]: DEBUG nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.321449] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.321664] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.321818] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.322014] env[63371]: DEBUG nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] No waiting events found dispatching network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1344.322398] env[63371]: WARNING nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received unexpected event network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 for instance with vm_state building and task_state spawning. [ 1344.413417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.484230] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1344.512150] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1344.513803] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1344.513953] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1344.514143] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac 
tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1344.515897] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1344.516119] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1344.517010] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87040b98-d705-49dd-8e4c-ac84362bc23f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.528131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c284b4d-16b0-4edd-9881-9d3f45713a1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.547325] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.554851] env[63371]: DEBUG oslo.service.loopingcall [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.555068] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.555306] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ffae8e1-b2e5-411b-9370-5b22626a18cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.583673] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.583673] env[63371]: value = "task-1773620" [ 1344.583673] env[63371]: _type = "Task" [ 1344.583673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.601880] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task} progress is 5%. 
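The nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU, no flavor or image limits, and the default ceilings of 65536 sockets/cores/threads, the only factorization is 1 socket x 1 core x 1 thread. A simplified sketch of that enumeration (the real code also honours preferred topologies and NUMA constraints):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Yield (sockets, cores, threads) triples whose product equals the vCPU count.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log above
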
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.624626] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.635068] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-482c8083-3b4f-444c-b529-5c1eae5159a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-482c8083-3b4f-444c-b529-5c1eae5159a4"}]} [ 1344.654455] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1344.675934] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1344.675934] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1344.690106] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 
tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: 83f2afd9-956a-4c8c-9f08-b65141062b17 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1344.711783] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.720107] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1344.805623] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.085281] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.108145] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task} progress is 99%. 
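The ERROR ... Got 409 ... placement.concurrent_update entry a few lines above is Placement's optimistic-concurrency check: the inventory PUT carried a stale resource provider generation, so the report client refreshes the provider's inventories, aggregates and traits (the Refreshing ... lines that follow it) and tries again. A schematic retry loop, with put_inventory and refresh_provider as assumed stand-ins for the real report-client calls:

def update_inventory_with_retry(put_inventory, refresh_provider, provider_uuid,
                                inventory, max_attempts=3):
    generation = refresh_provider(provider_uuid)       # read the current generation
    for _ in range(max_attempts):
        status = put_inventory(provider_uuid, generation, inventory)
        if status != 409:
            return status
        # Another writer bumped the generation; re-read and retry, as the
        # "Refreshing inventories for resource provider" lines show.
        generation = refresh_provider(provider_uuid)
    raise RuntimeError("inventory update kept conflicting for %s" % provider_uuid)
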
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.282170] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c201718d-39db-4be3-a461-9501b3d30417 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.292136] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcbc751-d3e1-4630-9ef1-a87c58a3e17c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.336571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdf91a6-16ff-4849-aa45-6537734a71e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.339682] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task, 'duration_secs': 0.644077} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.340432] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1345.341749] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4d5d63-3b08-42b7-b9f9-7d1de96fb588 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.348429] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cf9c5d-e976-4496-bbbc-c575e6c72e1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.375417] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f45737-140e-41fd-bd24-29bc076e9547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.394754] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.404534] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1345.404534] env[63371]: value = "task-1773621" [ 1345.404534] env[63371]: _type = "Task" [ 1345.404534] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.416012] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773621, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.598280] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.598617] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance network_info: |[{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1345.598869] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task, 'duration_secs': 0.769784} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.599283] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:0b:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54ac14c5-812a-455e-88ff-92040c426688', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.609147] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating folder: Project (014cf08938b14b169e45f01c87f33d23). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.609147] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.609147] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ad982a5-5c92-4245-8474-7ce616018bab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.610339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.610518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.611049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1345.611362] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed03e96-89e7-476f-a072-68941a434385 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.618355] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1345.618355] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7" [ 1345.618355] env[63371]: _type = "Task" [ 1345.618355] env[63371]: } to complete. 
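The Instance VIF info entry above shows how the cached Neutron VIF is translated for the hypervisor: the bridge name and MAC come straight from the port, the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork reference, and the port id rides along as iface_id. An illustrative mapping for the case shown in the log (vif_model is hard-coded here for brevity; the real driver derives it from the image's vmxnet3 hint):

def vif_to_vmware_info(vif):
    # Field names match the "Instance VIF info" entry logged above.
    return {
        "network_name": vif["network"]["bridge"],                 # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }
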
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.630137] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.631704] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created folder: Project (014cf08938b14b169e45f01c87f33d23) in parent group-v368199. [ 1345.631704] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating folder: Instances. Parent ref: group-v368248. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.631931] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80217310-133e-4f71-b22f-7ab505bbe255 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.645031] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created folder: Instances in parent group-v368248. [ 1345.645303] env[63371]: DEBUG oslo.service.loopingcall [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.645568] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.645802] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faf52193-2230-44a3-8e93-3b84ec204e8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.668369] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.668369] env[63371]: value = "task-1773624" [ 1345.668369] env[63371]: _type = "Task" [ 1345.668369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.677154] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.766724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.767662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.926018] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773621, 'name': ReconfigVM_Task, 'duration_secs': 0.371949} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.926018] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1345.926290] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f8aab75-594c-4215-9be7-4b4a9b146e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.929545] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-6763409b-6693-4b87-886c-59ab3091fe3b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6763409b-6693-4b87-886c-59ab3091fe3b"}]} [ 1345.937129] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1345.937129] env[63371]: value = "task-1773625" [ 1345.937129] env[63371]: _type = "Task" [ 1345.937129] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.953492] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.958641] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1345.980043] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1345.980043] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.994958] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: 83f2afd9-956a-4c8c-9f08-b65141062b17 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1346.016738] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1346.132160] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7, 'name': SearchDatastore_Task, 'duration_secs': 0.014481} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.132537] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.133418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.133418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.133418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.133418] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.133739] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a5d3997-b18c-44c0-a8a0-a26c214e08e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.147911] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.147911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.149259] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac9204b-b0ad-48bf-8bf1-e57e7e9f3bd6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.164607] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.164607] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5" [ 1346.164607] env[63371]: _type = "Task" [ 1346.164607] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.184372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.184372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.187822] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.187822] env[63371]: INFO nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Terminating instance [ 1346.198396] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.199333] env[63371]: DEBUG nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.199538] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.200789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46566f-cbbe-41b0-8046-f5ef2274b230 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.209904] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.209904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22dd47cd-1abf-4ca0-8db1-fa9b20d6c52c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.220732] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1346.220732] env[63371]: value = "task-1773626" [ 1346.220732] env[63371]: _type = "Task" [ 1346.220732] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.232334] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773626, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.454096] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task} progress is 92%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.596863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.597116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.618589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125ee5ea-e652-4721-a119-d5224abaed3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.627634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa87b077-684e-48c7-83d7-0e0f1735b3df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.674800] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fe03dd-be13-4efe-b833-08b033e69d41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.692860] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task, 'duration_secs': 0.565206} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.693129] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.024381} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.694384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eff3bc3-44e6-4e13-a484-cb5848b32418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.698596] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1346.700258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.700258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.700396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1346.700521] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04f2e81a-6702-4f37-b3d0-4f74dd6ad6a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.702952] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c06f67-a3ca-4496-9f85-7a4e77386ed1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.714408] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1346.720246] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.720246] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf9f0-40ec-69e2-8ac7-b36c66966199" [ 1346.720246] env[63371]: _type = "Task" [ 1346.720246] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.720246] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1346.720246] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f6cdb7-1902-c742-702e-bec3a9cb922a" [ 1346.720246] env[63371]: _type = "Task" [ 1346.720246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.738446] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf9f0-40ec-69e2-8ac7-b36c66966199, 'name': SearchDatastore_Task, 'duration_secs': 0.018443} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.744071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.744071] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f6cdb7-1902-c742-702e-bec3a9cb922a, 'name': SearchDatastore_Task, 'duration_secs': 0.017103} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744071] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773626, 'name': PowerOffVM_Task, 'duration_secs': 0.304496} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744245] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22d1d89e-036d-41ef-9936-81bfa90bcae4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.745902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.746049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.746279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.746429] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.746597] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.746851] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.747011] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.747256] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69cab130-c0ee-4945-af7a-d56ad736ef1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.749727] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-42203731-4aaf-4e3e-8b58-71206e4c5d9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.758825] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.758825] env[63371]: value = "task-1773628" [ 1346.758825] env[63371]: _type = "Task" [ 1346.758825] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.763686] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.766201] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.766899] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 
tempest-TenantUsagesTestJSON-121194719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.768299] env[63371]: INFO nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Terminating instance [ 1346.770071] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-302114d9-52fa-453e-a25b-bc124b9f8b00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.774019] env[63371]: DEBUG nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.774019] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.774150] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1834274d-b1a0-4159-9867-991ab608bb88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.780376] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.783609] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1346.783609] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35" [ 1346.783609] env[63371]: _type = "Task" [ 1346.783609] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.791112] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.791112] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-958fe09d-aba6-4cf4-8fc9-aef61800730d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.795861] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.798429] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1346.798429] env[63371]: value = "task-1773629" [ 1346.798429] env[63371]: _type = "Task" [ 1346.798429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.806747] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.847039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.847505] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.847629] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleting the datastore file [datastore1] a43fed87-5205-4148-834e-66778a90b7bc {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.847823] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b390be3-eed3-47ad-b059-f539221f673d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.858123] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1346.858123] env[63371]: value = "task-1773630" [ 1346.858123] env[63371]: _type = "Task" [ 1346.858123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.869591] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.952874] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task, 'duration_secs': 0.612868} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.953180] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1346.957129] env[63371]: DEBUG nova.compute.manager [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1346.958064] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4688bd7-74ff-4074-9715-d9523a98b39b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.241581] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-4e4fa56d-d030-465f-9b40-6569e601d3ff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4e4fa56d-d030-465f-9b40-6569e601d3ff"}]} [ 1347.259594] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1347.272853] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.277310] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1347.277310] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1347.295923] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35, 'name': SearchDatastore_Task, 'duration_secs': 0.014679} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.297381] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1347.301648] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31238640-2566-418e-86c2-045a9d67fa1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.316058] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1347.316058] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa" [ 1347.316058] env[63371]: _type = "Task" [ 1347.316058] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.317910] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773629, 'name': PowerOffVM_Task, 'duration_secs': 0.256929} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.322059] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1347.322460] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1347.323712] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1347.325987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e0d4a0-22e2-4e07-9cc7-3f060b463b33 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.338044] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.373526] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleting the datastore file [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1347.419920] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62a98648-4d0c-4fb9-be41-a4d7e48f9948 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.427023] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1347.427023] env[63371]: value = "task-1773632" [ 1347.427023] env[63371]: _type = "Task" [ 1347.427023] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.435176] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.474878] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-changed-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1347.475099] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Refreshing instance network info cache due to event network-changed-54ac14c5-812a-455e-88ff-92040c426688. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1347.475342] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.475485] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.475653] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Refreshing network info cache for port 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.781710] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644866} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.781808] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.782027] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.783026] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2feb2a2-f229-4893-ab97-7d3404fbb7ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.794190] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1347.794190] env[63371]: value = "task-1773633" [ 1347.794190] env[63371]: _type = "Task" [ 1347.794190] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.810481] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773633, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.835753] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.052828} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.835860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.836300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1347.837039] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2b626ed-30ed-477c-95c6-f1a5285cf1ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.848494] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1347.848494] env[63371]: value = "task-1773634" [ 1347.848494] env[63371]: _type = "Task" [ 1347.848494] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.862093] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.875313] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.527753} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.875666] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.875873] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.876200] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.876290] env[63371]: INFO nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1347.876835] env[63371]: DEBUG oslo.service.loopingcall [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.876835] env[63371]: DEBUG nova.compute.manager [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.876835] env[63371]: DEBUG nova.network.neutron [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.901478] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2da0f80-0d9a-4076-a1f3-a15f3bb5fe7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.910868] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7fa171-d710-486e-bb74-377455df097a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.951204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c115d7fc-204b-48f1-b246-6e29ca5ad009 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.963490] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0da369e-4410-4777-9f72-23759413bd6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.967548] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374584} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.967820] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.967998] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.968193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.968393] env[63371]: INFO nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1347.968650] env[63371]: DEBUG oslo.service.loopingcall [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.969271] env[63371]: DEBUG nova.compute.manager [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.969619] env[63371]: DEBUG nova.network.neutron [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.982575] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.305590] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088205} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.306060] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.309043] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131c165d-2d6e-4a4f-b39d-d18433fe1923 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.337281] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.337281] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02fa12b4-ab10-48ab-8376-cb6879f69e9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.366480] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.367940] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1348.367940] env[63371]: value = "task-1773635" [ 1348.367940] env[63371]: _type = "Task" [ 1348.367940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.378738] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.487268] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1348.820712] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updated VIF entry in instance network info cache for port 54ac14c5-812a-455e-88ff-92040c426688. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.821214] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.866981] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.879685] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.994166] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.537s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.994166] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1348.996412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.697s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.997052] env[63371]: DEBUG nova.objects.instance [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lazy-loading 'resources' on Instance uuid ca202079-2eae-441e-80f6-e403497e137d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.255064] env[63371]: DEBUG nova.network.neutron [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.324978] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.325285] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.325489] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing instance network info cache due to event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1349.325803] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.325957] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.326136] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.343632] env[63371]: DEBUG nova.network.neutron [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.369403] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.024637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.373636] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1349.373976] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1349.374300] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c195894-33a1-4d14-8798-163727a8e551 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.383173] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.384556] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1349.384556] env[63371]: value = "task-1773636" [ 1349.384556] env[63371]: _type = "Task" [ 1349.384556] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.395958] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.497896] env[63371]: DEBUG nova.compute.utils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1349.499167] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1349.499376] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1349.648236] env[63371]: DEBUG nova.policy [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e49a0ae65b9b4f878930641771fec10f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4994abb8da3b4a018414c60719a056b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1349.725965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.726393] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.757741] env[63371]: INFO nova.compute.manager [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 1.88 seconds to deallocate network for instance. [ 1349.848170] env[63371]: INFO nova.compute.manager [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 1.88 seconds to deallocate network for instance. [ 1349.885904] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task, 'duration_secs': 1.036358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.889411] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.890410] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f324953-a1cc-465f-8054-d5ea8246b2f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.904124] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077704} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.904449] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1349.904449] env[63371]: value = "task-1773637" [ 1349.904449] env[63371]: _type = "Task" [ 1349.904449] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.904667] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.905870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d994874-ccc5-4a51-905e-383479ea78ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.949120] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.949313] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773637, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.952376] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12345c9b-0a13-4011-a0c2-55649b1d8ae3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.977915] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1349.977915] env[63371]: value = "task-1773638" [ 1349.977915] env[63371]: _type = "Task" [ 1349.977915] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.991073] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.004978] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1350.086244] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfebb3a-5643-4ef9-a82b-8a843a7d66f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.097501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2486b7b-2c6c-4e9b-8dad-cc71eb3c56d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.137401] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0124417-1b60-413e-abf5-a5634a284012 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.146624] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef3dd92-5d7c-43bf-b438-21ed125465bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.161978] env[63371]: DEBUG nova.compute.provider_tree [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.269441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.330305] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Successfully created port: 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1350.354127] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updated VIF entry in instance network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.354503] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.357117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.420785] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773637, 'name': Rename_Task, 'duration_secs': 0.238471} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.421040] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.424998] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7141b60c-eec4-4092-b0a0-2c10332d9121 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.433691] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1350.433691] env[63371]: value = "task-1773639" [ 1350.433691] env[63371]: _type = "Task" [ 1350.433691] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.447922] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.489620] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.647582] env[63371]: DEBUG nova.compute.manager [req-87c0a5d6-a3e2-4e55-b652-3e51a1da9de3 req-c1f23d29-4504-4919-a0fa-97f66ba0613c service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-vif-deleted-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.648174] env[63371]: DEBUG nova.compute.manager [req-87c0a5d6-a3e2-4e55-b652-3e51a1da9de3 req-c1f23d29-4504-4919-a0fa-97f66ba0613c service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-vif-deleted-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.667131] env[63371]: DEBUG nova.scheduler.client.report [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.859275] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.859653] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.859831] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing instance network info cache due to event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1350.860079] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.860243] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.860409] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.947778] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773639, 'name': PowerOnVM_Task, 'duration_secs': 0.479019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.948309] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.948633] env[63371]: INFO nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 6.46 seconds to spawn the instance on the hypervisor. [ 1350.948752] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.949867] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebdc21c-c61b-4612-a398-e671345d047f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.991444] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task, 'duration_secs': 0.615184} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.992085] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.992667] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dad0185e-5064-4a05-9230-8435a6764db7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.002207] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1351.002207] env[63371]: value = "task-1773640" [ 1351.002207] env[63371]: _type = "Task" [ 1351.002207] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.013742] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773640, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.017529] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1351.053535] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1351.054206] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1351.054556] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1351.055041] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1351.057156] env[63371]: DEBUG 
nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1351.057156] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cc45a0-34dc-48c3-b775-9a40f81c19de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.069295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede1186-7fae-4ff4-8324-c05d3ff3cfea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.174018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.175251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.300s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.178390] env[63371]: INFO nova.compute.claims [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1351.202240] env[63371]: INFO nova.scheduler.client.report [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleted allocations for instance ca202079-2eae-441e-80f6-e403497e137d [ 1351.475088] env[63371]: INFO nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 22.91 seconds to build instance. 
[ 1351.513938] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773640, 'name': Rename_Task, 'duration_secs': 0.385717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.514516] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1351.514921] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0041bb7-cb4d-4964-983b-116669397547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.523785] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1351.523785] env[63371]: value = "task-1773641" [ 1351.523785] env[63371]: _type = "Task" [ 1351.523785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.534909] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.718248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.514s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.926776] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updated VIF entry in instance network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.927230] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.976863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.998s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.036272] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.432151] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.479833] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1352.498377] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Successfully updated port: 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1352.540887] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task, 'duration_secs': 0.709241} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.541947] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1352.541947] env[63371]: INFO nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 12.94 seconds to spawn the instance on the hypervisor. [ 1352.541947] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1352.542993] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d4a345-14da-4cf9-895a-e1f71bbfa864 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.712317] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6494053-9c68-4e99-bc00-9a0567b88c6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.723759] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7935de-4ae5-4065-8330-07b24aca0149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.762035] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96be5cb1-9bdc-44dd-9617-09036974fb19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.771491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8ff5c1-fe4f-462c-9fff-07ac2325eb32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.789417] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1353.004684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.004861] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.005019] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.023069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.064709] env[63371]: INFO nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 26.95 seconds to build instance. [ 1353.323837] env[63371]: ERROR nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [req-c5f7441b-5b87-4589-80c8-ec740a645b8f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5f7441b-5b87-4589-80c8-ec740a645b8f"}]} [ 1353.343061] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1353.357836] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1353.358124] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1353.376822] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1353.402734] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1353.546829] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1353.566683] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.505s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.581687] env[63371]: INFO nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Rebuilding instance [ 1353.657023] env[63371]: DEBUG nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1353.657023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad62be9-a4f7-4291-b466-e4061e4301c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.791241] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.791459] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.791795] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.791949] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.792054] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] No waiting events found dispatching network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.792608] env[63371]: WARNING nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 
7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received unexpected event network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 for instance with vm_state building and task_state spawning. [ 1353.792905] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-changed-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.793157] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing instance network info cache due to event network-changed-4493eb7b-33d3-4a78-a1dd-3a96c6144850. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1353.793396] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.813456] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.977805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15de9f00-16d0-40cb-8aa7-0e10ba8c9e8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.988344] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797a270c-9075-44c1-bbda-cc04b145a625 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.027342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e1369a-5058-423c-9d64-994d1ede71a2 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.036382] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059cc1a4-bdb6-447f-8e45-ba1a73af0021 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.053203] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1354.075351] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1354.174953] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1354.174953] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79de4b93-d9d8-4674-ad56-2b3a6b6e9490 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.182827] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1354.182827] env[63371]: value = "task-1773642" [ 1354.182827] env[63371]: _type = "Task" [ 1354.182827] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.194442] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773642, 'name': PowerOffVM_Task} progress is 0%. 
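
The PowerOnVM_Task and PowerOffVM_Task entries above all follow the same rhythm: invoke the vSphere call, receive a Task reference back, then poll it while logging "progress is N%" until it completes or errors out. A generic sketch of that loop; get_task_info stands in for however the task state is read (oslo.vmware wraps this for Nova), with states modeled on vSphere's running/success/error:

import time

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a vSphere-style task until it finishes, mirroring the log's progress lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 89, 'error': None}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
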
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.316674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.320021] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance network_info: |[{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1354.320021] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.321265] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing network info cache for port 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1354.321265] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:9f:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4493eb7b-33d3-4a78-a1dd-3a96c6144850', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.328139] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating 
folder: Project (4994abb8da3b4a018414c60719a056b9). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.329678] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9f422dd-964f-499e-82d4-dd02a6f503b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.344020] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created folder: Project (4994abb8da3b4a018414c60719a056b9) in parent group-v368199. [ 1354.344020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating folder: Instances. Parent ref: group-v368251. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.344020] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba47e2a2-b29c-47b9-bc00-84cb4515d224 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.353440] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created folder: Instances in parent group-v368251. [ 1354.353957] env[63371]: DEBUG oslo.service.loopingcall [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.354839] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.355332] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e778e7ad-03f7-42b6-9ae7-c7300e10905f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.380910] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.380910] env[63371]: value = "task-1773645" [ 1354.380910] env[63371]: _type = "Task" [ 1354.380910] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.390014] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773645, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.430750] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.599798] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1354.600259] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 42 to 43 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1354.600496] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1354.605729] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.695016] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773642, 'name': PowerOffVM_Task, 'duration_secs': 0.125551} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.695324] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1354.695556] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.696415] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1f8688-2b7d-4bf9-bfeb-50f7b69cc921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.707157] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1354.707157] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a3ce34b-760c-4fcb-82fa-612d00261d05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.739481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1354.740853] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1354.740853] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1354.740853] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34a57d2b-57dd-4db1-a974-541c556cdd71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.753924] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1354.753924] env[63371]: value = "task-1773647" [ 1354.753924] env[63371]: _type = "Task" [ 1354.753924] env[63371]: } to complete. 
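
The rebuild above tears the old VM down in a fixed order: power the VM off, unregister it from vCenter's inventory, then delete its directory from the datastore, waiting on each vSphere task before moving on. A compressed sketch of that sequence; power_off, unregister and delete_files are hypothetical stand-ins for the PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task calls seen in the log:

def destroy_vm(power_off, unregister, delete_files, vm_ref, datastore_dir):
    """Teardown order from the log: stop the VM, drop it from the inventory, remove its files.
    All three callables are hypothetical wrappers around the corresponding vSphere operations."""
    power_off(vm_ref)            # VirtualMachine.PowerOffVM_Task, waited on before continuing
    unregister(vm_ref)           # VirtualMachine.UnregisterVM (no task to poll)
    delete_files(datastore_dir)  # FileManager.DeleteDatastoreFile_Task on e.g. "[datastore1] <uuid>"
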
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.767446] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.893738] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773645, 'name': CreateVM_Task, 'duration_secs': 0.459036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.893927] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.894740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.894928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.895284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.895549] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cce4025-eb8d-4bc9-a901-6ac474a9feed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.901482] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1354.901482] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2" [ 1354.901482] env[63371]: _type = "Task" [ 1354.901482] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.913507] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.934759] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.105746] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updated VIF entry in instance network info cache for port 4493eb7b-33d3-4a78-a1dd-3a96c6144850. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.105988] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.108567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.933s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.108776] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1355.112528] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.876s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.112995] env[63371]: DEBUG nova.objects.instance [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lazy-loading 'resources' on Instance uuid fc0715a1-a056-4a1b-a86e-959680effc97 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.269025] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160511} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.269025] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1355.269025] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1355.269836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1355.417026] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2, 'name': SearchDatastore_Task, 'duration_secs': 0.0204} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.417026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.417026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.417026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.417410] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.417410] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.417410] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a933038-a48a-4c9d-96aa-9102ee9ffc99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.429164] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.429164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Folder [datastore1] devstack-image-cache_base created. 
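
The sequence above is the image-cache fast path: take a lock named after the cached VMDK, run a SearchDatastore_Task to see whether devstack-image-cache_base already holds the image, and only create directories and copy the disk when it is missing. A simplified, filesystem-based sketch of that check-then-populate pattern; Nova itself uses oslo.concurrency named locks and datastore operations rather than local paths, and fetch is a hypothetical download callable:

import os
from threading import Lock

_image_locks = {}  # one lock per cached image path; illustrative only

def ensure_cached_image(cache_dir, image_id, fetch):
    """Return the cached image path, populating the cache only if it is missing."""
    path = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    lock = _image_locks.setdefault(path, Lock())
    with lock:                                  # serialize concurrent builds of the same image
        if os.path.exists(path):                # cache hit: nothing to download
            return path
        os.makedirs(os.path.dirname(path), exist_ok=True)  # "Creating directory ..." step
        fetch(path)                             # hypothetical: pull the base image into the cache
        return path
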
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.429164] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4c4d1a-26d4-44bd-b49d-33906cbf7e61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.437688] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1355.437688] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17" [ 1355.437688] env[63371]: _type = "Task" [ 1355.437688] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.450816] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.613450] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.616214] env[63371]: DEBUG nova.compute.utils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1355.621348] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Allocating IP information in the background. 
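
"Allocating IP information in the background" means Neutron port creation is kicked off asynchronously while the rest of the build (block device mappings, the VM shell) proceeds, and the result is only awaited once the VIF details are actually needed. A sketch of that overlap using a thread pool; Nova itself uses greenthreads, and allocate_network/build_vm_shell are hypothetical callables:

from concurrent.futures import ThreadPoolExecutor

_executor = ThreadPoolExecutor(max_workers=4)

def build_instance(instance, allocate_network, build_vm_shell):
    """Overlap network allocation with the rest of the build, then join on the result."""
    network_future = _executor.submit(allocate_network, instance)  # starts immediately
    build_vm_shell(instance)        # folders, block devices, VM config proceed in parallel
    return network_future.result()  # block only when the VIF info is actually required
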
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1355.621786] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.708948] env[63371]: DEBUG nova.policy [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1355.951599] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17, 'name': SearchDatastore_Task, 'duration_secs': 0.011819} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.952817] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270195b3-1fa1-4132-a404-280cae5aa732 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.963602] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1355.963602] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01" [ 1355.963602] env[63371]: _type = "Task" [ 1355.963602] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.973852] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.122982] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1356.150780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848ef236-a2f5-4bff-a0ac-857ea65edccd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.160350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175c5595-ab1c-475a-ab42-eec57c240a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.194711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c3ffeb-712e-416c-a0a3-c062830c2a3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.203486] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a189c44f-8a49-4a7f-98aa-6d937505b665 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.219446] env[63371]: DEBUG nova.compute.provider_tree [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.319264] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1356.319492] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1356.319696] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.319794] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1356.319972] env[63371]: DEBUG 
nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.324289] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1356.324460] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1356.324627] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1356.324792] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1356.324952] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1356.325137] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1356.325981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a92366-5102-49f9-acea-158d0d5fc31d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.336938] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d9d422-656e-4ea3-9a36-1e3c579d1bee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.354428] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1356.360645] env[63371]: DEBUG oslo.service.loopingcall [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
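
The nova.virt.hardware lines above are the CPU topology search: with no flavor or image limits (effectively 65536 sockets, cores and threads), Nova enumerates every (sockets, cores, threads) split of the flavor's vCPU count and, for the 1-vCPU m1.nano flavor, ends up with the single topology 1:1:1. A simplified version of that enumeration; Nova additionally applies preferences and sorting, omitted here:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals the vCPU count."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- matching "Got 1 possible topologies" above
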
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1356.362987] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Successfully created port: e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1356.365769] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1356.365769] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c254aa4-50b6-4d5e-b3bd-b39c623c8015 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.393049] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1356.393049] env[63371]: value = "task-1773648" [ 1356.393049] env[63371]: _type = "Task" [ 1356.393049] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.402519] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.476465] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01, 'name': SearchDatastore_Task, 'duration_secs': 0.010069} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.476753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.479804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.479804] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c2cae59-23a8-40a7-afdd-d104ac00e262 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.490146] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1356.490146] env[63371]: value = "task-1773649" [ 1356.490146] env[63371]: _type = "Task" [ 1356.490146] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.501041] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.723569] env[63371]: DEBUG nova.scheduler.client.report [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1356.908040] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.007040] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.138341] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.176610] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1357.176646] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.176793] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1357.177015] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1357.177515] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1357.177746] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1357.177927] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1357.178470] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1357.182185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b756886-fd7e-4e63-810a-d5028fcf00c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.192735] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b062e01-616b-4579-b883-25ccdd5358dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.222427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.222837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.229886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.232499] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.746s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.253518] env[63371]: INFO nova.scheduler.client.report [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance fc0715a1-a056-4a1b-a86e-959680effc97 [ 1357.407958] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.505165] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519214} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.505448] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.505668] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.505933] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01b787a3-f87e-44de-a197-57664d5f7695 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.514179] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1357.514179] env[63371]: value = "task-1773650" [ 1357.514179] env[63371]: _type = "Task" [ 1357.514179] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.524208] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773650, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.740990] env[63371]: INFO nova.compute.claims [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.763618] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.992s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.911406] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task, 'duration_secs': 1.382616} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.911844] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1357.912119] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.912238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.912561] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1357.912808] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3c45653-dc3a-4380-ac5d-f9350a83002f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.918754] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1357.918754] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed" [ 1357.918754] env[63371]: _type = "Task" [ 1357.918754] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.930569] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.024576] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081137} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.028888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.028888] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714271b2-42eb-49e3-ae87-09c1bf3fdcbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.052676] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.053335] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1208dd2-276f-4504-a850-1eea1a9655f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.079673] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1358.079673] env[63371]: value = "task-1773651" [ 1358.079673] env[63371]: _type = "Task" [ 1358.079673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.089260] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773651, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.252647] env[63371]: INFO nova.compute.resource_tracker [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating resource usage from migration e496466e-2a3b-442c-9adb-941ce7e06a5e [ 1358.437488] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed, 'name': SearchDatastore_Task, 'duration_secs': 0.013653} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.438319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.438581] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1358.439610] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d789e9c5-369d-44f4-a0cb-8cb1e6330371 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.453732] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1358.453823] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1358.454559] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02420b35-8f4d-45d1-8426-30fa1295828a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.462746] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1358.462746] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5266d001-9833-62e6-38ac-29137b426daa" [ 1358.462746] env[63371]: _type = "Task" [ 1358.462746] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.475377] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5266d001-9833-62e6-38ac-29137b426daa, 'name': SearchDatastore_Task, 'duration_secs': 0.010031} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.476194] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-758b31ca-8584-4b01-95f3-8a0e8631db93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.483110] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1358.483110] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e" [ 1358.483110] env[63371]: _type = "Task" [ 1358.483110] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.491971] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.593375] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773651, 'name': ReconfigVM_Task, 'duration_secs': 0.304821} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.595403] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.595868] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-315d7f15-589f-40f0-b3d4-74b6a61c2422 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.604921] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1358.604921] env[63371]: value = "task-1773652" [ 1358.604921] env[63371]: _type = "Task" [ 1358.604921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.617511] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773652, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.878136] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb8848e-2114-4428-b3f0-bad29ba76088 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.889558] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7ffbff-d0e5-4225-bb48-0be97821a59f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.924738] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Successfully updated port: e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.926767] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0214f55b-909b-4680-b554-c0c2c5e58499 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.936433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e472eb9-21dd-4c6d-9be1-f0f56a06c31f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.955013] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1359.000522] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e, 'name': SearchDatastore_Task, 'duration_secs': 0.009675} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.000856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.001127] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1359.001406] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-158150a2-fad0-48f1-a31c-077ad3503046 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.014836] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1359.014836] env[63371]: value = "task-1773653" [ 1359.014836] env[63371]: _type = "Task" [ 1359.014836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.024726] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.119154] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773652, 'name': Rename_Task, 'duration_secs': 0.168114} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.119630] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1359.119992] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1f3dae5-d952-44d9-b41f-fe053ea690a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.135196] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1359.135196] env[63371]: value = "task-1773654" [ 1359.135196] env[63371]: _type = "Task" [ 1359.135196] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.145360] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.201999] env[63371]: DEBUG nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received event network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1359.207887] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.215161] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.215161] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.009s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.215161] env[63371]: DEBUG nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] No waiting events found dispatching network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1359.215161] env[63371]: WARNING 
nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received unexpected event network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 for instance with vm_state building and task_state spawning. [ 1359.299107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.299433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.299433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.300092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.300092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.309251] env[63371]: INFO nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Terminating instance [ 1359.313180] env[63371]: DEBUG nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1359.313180] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.315085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e66c313-023a-4c77-9b9f-81400665f751 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.327797] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.333983] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4de479f3-6880-41f0-980d-50987f6fa86e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.342832] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1359.342832] env[63371]: value = "task-1773655" [ 1359.342832] env[63371]: _type = "Task" [ 1359.342832] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.356492] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773655, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.434049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.434049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.434049] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.497542] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-1b4621ad-cc4e-47bb-93b5-dae0216c5ac0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1b4621ad-cc4e-47bb-93b5-dae0216c5ac0"}]} [ 1359.522234] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1359.530780] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501153} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.531080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1359.531390] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1359.531586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4281c19d-d536-4c29-8fc1-0a538edad943 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.543719] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1359.543719] env[63371]: value = "task-1773656" [ 1359.543719] env[63371]: _type = "Task" [ 1359.543719] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.554962] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773656, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.556296] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1359.556552] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1359.573504] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1359.587481] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.587858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.598691] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1359.648808] env[63371]: DEBUG 
oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.661052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.661307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.700590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.702647] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.855581] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773655, 'name': PowerOffVM_Task, 'duration_secs': 0.267448} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.855878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.856080] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.858984] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4ace4c8-5219-4a6d-b47b-3b4f7837d79d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleting the datastore file [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.946147] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c61fc91-7035-43e9-b51d-ec3bfebdf665 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.953522] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1359.953522] env[63371]: value = "task-1773658" [ 1359.953522] env[63371]: _type = "Task" [ 1359.953522] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.965901] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.985148] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1360.055882] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773656, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081348} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.055882] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1360.056652] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1d2e8d-0478-41af-b4c1-0c7f468851a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.085709] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.089261] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-560ef428-cb06-4042-8e4b-687d74c71bd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.113589] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1360.113589] env[63371]: value = "task-1773659" [ 1360.113589] env[63371]: _type = "Task" [ 1360.113589] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.128761] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.142292] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task, 'duration_secs': 0.642961} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.145274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.145540] env[63371]: INFO nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1360.146312] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1360.146840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dd30a9-3d08-400d-bc74-e195b62a5157 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.151405] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.230512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fca64f-eb7c-4d44-a154-2fd217a8ee36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.244300] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86712cfc-0a95-4f13-adf5-351dcb1cbc8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.277491] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca41cf6f-67e8-4849-be30-b58c10aaf2f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.285998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9085e9fa-c241-4cf1-8f73-f92d99ffe6f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.300598] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1360.468559] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133731} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.468559] env[63371]: INFO nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1360.468877] env[63371]: DEBUG oslo.service.loopingcall [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.468877] env[63371]: DEBUG nova.compute.manager [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1360.468877] env[63371]: DEBUG nova.network.neutron [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1360.630614] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.658730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.659109] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance network_info: |[{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1360.663607] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1360.672484] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating folder: Project (a4ca8a73414142d497ebd3d3f043d9ae). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.673840] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28b03ac2-5a09-4a50-a02f-8ea708bb16d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.676225] env[63371]: INFO nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 31.41 seconds to build instance. [ 1360.688707] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created folder: Project (a4ca8a73414142d497ebd3d3f043d9ae) in parent group-v368199. [ 1360.689109] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating folder: Instances. Parent ref: group-v368255. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.689545] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-460a48e3-eed9-4d9a-b22d-b047648ee019 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.701623] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created folder: Instances in parent group-v368255. [ 1360.702062] env[63371]: DEBUG oslo.service.loopingcall [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.702705] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1360.703361] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-556df988-3952-408a-a810-dacc7f9bad8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.728310] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1360.728310] env[63371]: value = "task-1773662" [ 1360.728310] env[63371]: _type = "Task" [ 1360.728310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.738548] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.826628] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-d26bc867-3985-49e5-8d08-ecbb48acf20c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d26bc867-3985-49e5-8d08-ecbb48acf20c"}]} [ 1360.851374] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1360.867763] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1360.867969] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1360.885202] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1360.907668] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 
tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1361.126792] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task, 'duration_secs': 0.77588} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.127125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.127758] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c469bfd-b048-41a3-9c98-5a4c5ee01700 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.136089] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1361.136089] env[63371]: value = "task-1773663" [ 1361.136089] env[63371]: _type = "Task" [ 1361.136089] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.150119] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773663, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.171028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.171319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.178467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.437s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.225929] env[63371]: DEBUG nova.network.neutron [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.242175] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.427381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.427489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.443928] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9453433d-1274-4af1-8378-186cc5a3023f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.454365] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c75719a-7033-4055-9298-b5c0eeb7210d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.489998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc930b82-6930-461b-8437-7b462032bf94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.499084] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd089d-5d69-48b0-9451-20b6bb18588c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.513372] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1361.647898] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773663, 'name': Rename_Task, 'duration_secs': 0.143294} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.648188] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.648462] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e18884c1-5eae-45d7-8a59-5ddae62cd112 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.657937] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1361.657937] env[63371]: value = "task-1773664" [ 1361.657937] env[63371]: _type = "Task" [ 1361.657937] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.669309] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.681023] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1361.728764] env[63371]: INFO nova.compute.manager [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 1.26 seconds to deallocate network for instance. [ 1361.746504] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task, 'duration_secs': 0.668668} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.746681] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.751477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.751696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.752256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1361.752337] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b35ee775-5a4a-4685-9cba-353aaa877707 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.759443] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1361.759443] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526b26c1-6920-16e4-ce7d-0b35564614aa" [ 1361.759443] env[63371]: _type = "Task" [ 1361.759443] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.774190] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526b26c1-6920-16e4-ce7d-0b35564614aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010237} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.774497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.774743] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1361.774982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.775151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.775346] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.775622] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55ed11aa-a62a-4f5f-ae1e-2718763d46ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.789701] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.789888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1361.790645] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8691d884-f807-49c2-bd10-ca6962f96106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.797389] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1361.797389] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493" [ 1361.797389] env[63371]: _type = "Task" [ 1361.797389] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.805807] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.885355] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received event network-changed-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.885470] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Refreshing instance network info cache due to event network-changed-e2249de3-2c03-4371-aab4-6173dd2b5d56. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1361.885691] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Acquiring lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.886007] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Acquired lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.887489] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Refreshing network info cache for port e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1362.034924] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-c7922e61-1ea0-4b1c-b032-51d747506aca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c7922e61-1ea0-4b1c-b032-51d747506aca"}]} [ 1362.052137] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1362.069125] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1362.069125] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1362.083799] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1362.104431] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1362.169733] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773664, 'name': PowerOnVM_Task, 'duration_secs': 0.46225} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.170080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.170237] env[63371]: DEBUG nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.171030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a60a53-3068-4ff7-aa5a-8e9eb05f9554 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.204261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.240738] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.251727] env[63371]: DEBUG nova.compute.manager [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-changed {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1362.252039] env[63371]: DEBUG nova.compute.manager [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing instance network info cache due to event network-changed. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1362.252256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.252397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.252552] env[63371]: DEBUG nova.network.neutron [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.312458] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493, 'name': SearchDatastore_Task, 'duration_secs': 0.009259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.318312] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd46db18-3028-4aa7-ba96-05ef51923839 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.325419] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1362.325419] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08" [ 1362.325419] env[63371]: _type = "Task" [ 1362.325419] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.334711] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.691847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc62c889-9c6a-4d07-bace-662579bc5ca2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.698445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.704937] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83376b1e-067f-425c-9d66-7195ee228a4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.739333] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0466e4-c035-4419-b545-409ceac92731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.747974] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf61ff5-bbad-4223-874a-6f0ddfbad3a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.766165] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1362.835316] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08, 'name': SearchDatastore_Task, 'duration_secs': 0.011484} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.835586] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.835843] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1362.836118] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f981aac-d3df-4c26-b185-33529a8abaa9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.844011] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1362.844011] env[63371]: value = "task-1773665" [ 1362.844011] env[63371]: _type = "Task" [ 1362.844011] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.854616] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.047875] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.048195] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.048494] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.048702] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.048873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.051475] env[63371]: INFO nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Terminating instance [ 1363.053525] env[63371]: DEBUG nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1363.053776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1363.054617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f472da0-f138-4c45-b51f-366be758066a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.065601] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1363.065764] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-febecd38-ae5d-49ef-9d6b-8ad39ee279ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.074857] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1363.074857] env[63371]: value = "task-1773666" [ 1363.074857] env[63371]: _type = "Task" [ 1363.074857] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.090030] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.240394] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updated VIF entry in instance network info cache for port e2249de3-2c03-4371-aab4-6173dd2b5d56. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1363.241198] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.315216] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1363.315216] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 49 to 50 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1363.315216] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1363.360036] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.438600] env[63371]: DEBUG nova.network.neutron [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.587709] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773666, 'name': PowerOffVM_Task, 'duration_secs': 0.347384} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.588073] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.588292] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.588639] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-008d6851-ac16-4d28-91fb-34778699c39e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.684141] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.684473] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.684711] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleting the datastore file [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.685080] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20a03c54-2fbc-4d4f-8454-107f80881184 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.694558] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1363.694558] env[63371]: value = "task-1773668" [ 1363.694558] env[63371]: _type = "Task" [ 1363.694558] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.704833] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.746725] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Releasing lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.747868] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-vif-deleted-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.821998] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 6.589s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.822244] env[63371]: INFO nova.compute.manager [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Migrating [ 1363.822507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.822648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.823977] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.048s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.827331] env[63371]: INFO nova.compute.claims [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1363.833019] env[63371]: INFO nova.compute.rpcapi [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1363.833651] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1363.870269] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558896} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.870617] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1363.870936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1363.871306] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47ffbb41-002b-4e00-9964-81257552636c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.882309] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1363.882309] env[63371]: value = "task-1773669" [ 1363.882309] env[63371]: _type = "Task" [ 1363.882309] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.893311] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773669, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.943824] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.038306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.039759] env[63371]: INFO nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Terminating instance [ 1364.044123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.044123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.044123] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.211530] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1364.214616] env[63371]: INFO nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1364.214616] env[63371]: DEBUG oslo.service.loopingcall [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.214783] env[63371]: DEBUG nova.compute.manager [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1364.214783] env[63371]: DEBUG nova.network.neutron [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1364.362167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.362460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.362689] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.393884] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072954} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.394193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1364.395079] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9ab860-1a28-4bf2-820f-b1b0ce072a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.422545] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.422659] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf358dfe-7a1a-4bd4-9df7-f1e81da662cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.449999] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1364.449999] env[63371]: value = "task-1773670" [ 1364.449999] env[63371]: _type = "Task" [ 1364.449999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.459324] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773670, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.580280] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1364.694993] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.930299] env[63371]: DEBUG nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-vif-deleted-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1364.930561] env[63371]: INFO nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Neutron deleted interface 4493eb7b-33d3-4a78-a1dd-3a96c6144850; detaching it from the instance and deleting it from the info cache [ 1364.930748] env[63371]: DEBUG nova.network.neutron [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.963034] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773670, 'name': ReconfigVM_Task, 'duration_secs': 0.325105} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.965772] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.967096] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b19c2df-ea8b-4a34-8c69-a69c4e938642 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.976444] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1364.976444] env[63371]: value = "task-1773671" [ 1364.976444] env[63371]: _type = "Task" [ 1364.976444] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.995351] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773671, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.153791] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.197217] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.197347] env[63371]: DEBUG nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1365.197554] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.198661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036da373-6de8-4c51-8f7d-32aacec2b415 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.208579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.208988] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba08c0cf-8873-403e-b9a5-e76efc7003da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.221615] env[63371]: DEBUG nova.network.neutron [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.223483] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1365.223483] env[63371]: value = "task-1773672" [ 1365.223483] env[63371]: _type = "Task" [ 1365.223483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.232536] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773672, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.404872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a36987c-c18f-4d8f-9759-092bef8ad55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.413491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f97bc6-359a-4759-aac0-bbadd4f7ee6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.444211] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-861dc69b-5481-4a0d-8e0a-aef778aef43b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.446513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238aa8b4-fe9d-4d6f-a88b-d4724df6aa8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.456301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685051be-6146-4916-a915-872a670907eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.462639] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6b1a47-1dc8-4d7a-8870-967c4ad1dcb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.484949] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1365.504585] env[63371]: DEBUG nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Detach interface failed, port_id=4493eb7b-33d3-4a78-a1dd-3a96c6144850, reason: Instance 7841ebd2-0c23-4e32-8b81-42311a32c6fd could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1365.514415] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773671, 'name': Rename_Task, 'duration_secs': 0.156152} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.514970] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1365.515246] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b1de598-e5de-41ee-9589-d36ad90e7836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.522371] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1365.522371] env[63371]: value = "task-1773673" [ 1365.522371] env[63371]: _type = "Task" [ 1365.522371] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.532139] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.547687] env[63371]: DEBUG nova.scheduler.client.report [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 50 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1365.547955] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 50 to 51 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1365.548152] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1365.657108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.725840] env[63371]: INFO nova.compute.manager [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 1.51 seconds to deallocate network for instance. [ 1365.741749] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773672, 'name': PowerOffVM_Task, 'duration_secs': 0.199142} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.741981] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.742171] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.742446] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f984fff-fcf7-4e85-accb-e587a9830ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.773773] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.774062] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.774220] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.774480] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72839552-6eb1-4706-bd9b-fbc4d1a974bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.782034] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1365.782034] env[63371]: value = "task-1773675" [ 1365.782034] 
env[63371]: _type = "Task" [ 1365.782034] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.795330] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.035058] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.060464] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.061122] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1366.065189] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.970s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.065556] env[63371]: DEBUG nova.objects.instance [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lazy-loading 'resources' on Instance uuid d9523239-79d1-434f-977a-e1f0e358c82b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.236846] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.297899] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091378} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.298376] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.298418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.298575] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.298746] env[63371]: INFO nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1366.298995] env[63371]: DEBUG oslo.service.loopingcall [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.299209] env[63371]: DEBUG nova.compute.manager [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1366.299309] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1366.317877] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1366.538581] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task, 'duration_secs': 0.540835} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.539262] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1366.539739] env[63371]: INFO nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1366.540304] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1366.542302] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cf06b6-6375-4185-9c36-0907ad87dd44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.567924] env[63371]: DEBUG nova.compute.utils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1366.569456] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1366.572857] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1366.716963] env[63371]: DEBUG nova.policy [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c1437e43364f0ba8db6677fe2ed978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3fa37041acf4211987c97c105c47cf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1366.820301] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.068434] env[63371]: INFO nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 33.22 seconds to build instance. [ 1367.078725] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1367.177916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7264c6-e9a1-4cc7-b98e-0bec65df7c2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.209722] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1367.216848] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e41d2-3323-4dc9-b4ba-e92d228a82f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.225293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6f85d1-d4b2-48c7-b2a0-2b56a5a38345 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.259560] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fc63d0-c4a0-4a7a-bafc-b8b7e756d2dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.268587] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb6e7b0-d0df-4b5e-b405-045319455f3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.286543] env[63371]: DEBUG nova.compute.provider_tree [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.324933] env[63371]: INFO nova.compute.manager [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 1.03 seconds to deallocate network for instance. 
[ 1367.369472] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Successfully created port: fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1367.571072] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.814s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.724256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.724256] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80ac13b8-48b3-48c5-addc-81a4e51f354c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.735266] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1367.735266] env[63371]: value = "task-1773676" [ 1367.735266] env[63371]: _type = "Task" [ 1367.735266] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.747557] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773676, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.795109] env[63371]: DEBUG nova.scheduler.client.report [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.800025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.800579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.834942] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.077910] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1368.091247] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1368.117181] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1368.119934] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501df96f-dd66-411b-ad9d-bbf386fd3afe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.128894] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327e6ac5-434c-432f-a42e-a8d7b652e2d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.246782] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773676, 'name': PowerOffVM_Task, 'duration_secs': 0.23979} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.247071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1368.247259] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1368.302013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.304341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.682s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.305789] env[63371]: INFO nova.compute.claims [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.336335] env[63371]: INFO nova.scheduler.client.report [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance d9523239-79d1-434f-977a-e1f0e358c82b [ 1368.601288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.753620] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow 
threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1368.754032] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1368.754032] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.754207] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1368.754345] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.754485] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1368.754688] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1368.754842] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1368.755008] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1368.757595] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1368.757754] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1368.763136] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-341fc6a6-4255-4fd2-9d63-c14226a99ace {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.780799] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1368.780799] env[63371]: value = "task-1773677" [ 1368.780799] env[63371]: _type = "Task" [ 1368.780799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.789748] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773677, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.844534] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.536s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.044891] env[63371]: DEBUG nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.045138] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.045336] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.045504] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.045664] env[63371]: DEBUG nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] No waiting events found dispatching network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1369.045839] env[63371]: WARNING nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] 
[instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received unexpected event network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 for instance with vm_state building and task_state spawning. [ 1369.081863] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Successfully updated port: fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1369.291230] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773677, 'name': ReconfigVM_Task, 'duration_secs': 0.216396} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.291632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1369.583617] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.583990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.584282] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.798053] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1369.798343] env[63371]: DEBUG nova.virt.hardware [None 
req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1369.799018] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1369.799815] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1369.800467] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1369.808491] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to detach disk 
2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1369.811282] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-443f8ccc-96df-48b5-a62d-c98f5f6c09b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.831137] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1369.831137] env[63371]: value = "task-1773678" [ 1369.831137] env[63371]: _type = "Task" [ 1369.831137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.842838] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773678, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.886054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615777db-f3cc-4cce-ba72-be08234ed74f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.898712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89aedbe-ba54-4026-845e-5092a4370822 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.932704] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99abe26b-948d-4cfb-8d29-93efe6cdf822 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.941577] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fffcce-0459-48d9-a17a-c05a4cc012c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.955542] env[63371]: DEBUG nova.compute.provider_tree [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.006209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.006396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.038698] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.038945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.130673] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1370.309343] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.341768] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773678, 'name': ReconfigVM_Task, 'duration_secs': 0.152762} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.342060] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1370.342842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589fdc54-772f-4624-b8f2-c6207344a46d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.365546] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1370.365848] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a6e7438-8e14-4fa4-abba-0db4f9ac1e13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.386017] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1370.386017] env[63371]: value = "task-1773679" [ 1370.386017] env[63371]: _type = "Task" [ 1370.386017] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.393336] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.461613] env[63371]: DEBUG nova.scheduler.client.report [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.770033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.770402] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.815019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.815019] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance network_info: |[{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1370.815438] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:92:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca99f7a1-6365-4d3c-af16-1b1c1288091e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcd67cd5-500d-457a-9bbb-655583d97dd2', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1370.821025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating folder: Project (b3fa37041acf4211987c97c105c47cf0). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.821433] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84786e61-a8e9-4b18-ba8a-85aac3a8946a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.835013] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created folder: Project (b3fa37041acf4211987c97c105c47cf0) in parent group-v368199. [ 1370.835013] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating folder: Instances. Parent ref: group-v368258. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.835013] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7dd536d-3151-45de-a8a9-d3d1f08b4877 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.844129] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created folder: Instances in parent group-v368258. [ 1370.844129] env[63371]: DEBUG oslo.service.loopingcall [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.844129] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1370.844129] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d82d5aa9-7e31-4206-bd35-c7a82de78582 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.862033] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1370.862033] env[63371]: value = "task-1773682" [ 1370.862033] env[63371]: _type = "Task" [ 1370.862033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.869580] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773682, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.893745] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773679, 'name': ReconfigVM_Task, 'duration_secs': 0.24862} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.897018] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1370.897018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1370.968797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.968797] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1370.970181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.258s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.972694] env[63371]: INFO nova.compute.claims [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.080315] env[63371]: DEBUG nova.compute.manager [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.080695] env[63371]: DEBUG nova.compute.manager [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1371.081060] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.081332] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.081614] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.372149] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773682, 'name': CreateVM_Task, 'duration_secs': 0.330816} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.372343] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1371.373056] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.373227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.373810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1371.374083] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d4966ec-0420-45cc-9600-d9319a678a8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.378463] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1371.378463] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb" [ 1371.378463] env[63371]: _type = "Task" [ 1371.378463] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.386642] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.401018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ff26ae-1b58-4cd8-8a41-4361f1f29500 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.420223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c989e-8f0c-42c5-a377-cfffda9f1ea7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.437305] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1371.471509] env[63371]: DEBUG nova.compute.utils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1371.473391] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1371.473565] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1371.537593] env[63371]: DEBUG nova.policy [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1371.899152] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.899152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.899152] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1371.899152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.899491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.899491] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1371.899491] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87685007-d58b-4a1a-bbc7-8a92e752f442 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.908609] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1371.908862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1371.909916] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1e574d-c72d-48a6-aff6-137f739accc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.918038] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1371.918038] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4" [ 1371.918038] env[63371]: _type = "Task" [ 1371.918038] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.929529] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.976384] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1372.042197] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.042414] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.044262] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Successfully created port: 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1372.046470] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1372.431057] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.431969] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9dfd45-1b8e-4354-856b-5e3f2e2f101c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.438898] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1372.438898] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7" [ 1372.438898] env[63371]: _type = "Task" [ 1372.438898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.446404] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.459553] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11506186-d691-48ca-8548-184b163ea44d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.465617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95705997-4ac8-44dc-8e32-bbcdb5b859ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.499025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61e303-d604-4f21-b459-e768b97da9bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.505588] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fd62ba-260a-4885-bb4a-ea88e857f03f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.520524] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1372.547110] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.950613] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008992} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.950890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.951160] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1372.951427] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0732151-86ae-48ce-9d1f-ac62acff60bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.957893] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1372.957893] env[63371]: value = "task-1773683" [ 1372.957893] env[63371]: _type = "Task" [ 1372.957893] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.971882] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.003325] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1373.034679] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1373.034954] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1373.035135] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.035338] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1373.035514] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.035640] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1373.035863] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1373.036043] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1373.036227] env[63371]: DEBUG nova.virt.hardware [None 
req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1373.036402] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1373.036587] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1373.037508] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8346b993-3528-44d5-9f8f-fa133d2ddd59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.045458] env[63371]: ERROR nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [req-f1ed4243-b73c-430a-8cfe-40ff7e2632b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f1ed4243-b73c-430a-8cfe-40ff7e2632b4"}]} [ 1373.046771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255ecb81-47dd-46a7-a7a4-3ff1ef8038f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.071200] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1373.079505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.079746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.079919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.095796] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1373.096048] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1373.107839] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1373.126986] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1373.469687] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456135} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.469890] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.470119] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.470380] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69a96564-3125-4c6c-9794-152d29bdc011 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.476536] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1373.476536] env[63371]: value = "task-1773684" [ 1373.476536] env[63371]: _type = "Task" [ 1373.476536] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.488737] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773684, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.499266] env[63371]: DEBUG nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received event network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1373.499510] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.499738] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.499916] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.500113] env[63371]: DEBUG nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] No waiting events found dispatching network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1373.500300] env[63371]: WARNING nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received unexpected event network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c for instance with vm_state building and task_state spawning. 
[ 1373.646106] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Successfully updated port: 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1373.664642] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ed039e-8acc-42ee-b4b0-e4f3fb173722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.673708] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcdb53c-6953-438c-ae63-fab78666600f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.708574] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf735c6-de57-48fb-b4a0-f5b6110424f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.718230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42363273-3629-4faa-9eef-e5c80f392b41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.732648] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1373.988582] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.053846} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.988846] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1373.989683] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f153dc6b-98e4-4e89-9a19-a42bd395b599 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.014938] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.015568] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02acb037-50cb-4040-b1b3-be9755729981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.035891] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1374.035891] env[63371]: value = "task-1773685" [ 1374.035891] env[63371]: _type = "Task" [ 1374.035891] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.044328] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773685, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.141732] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.141974] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.142162] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.148287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.148418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.148553] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.266031] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1374.266031] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 55 to 56 during operation: update_inventory {{(pid=63371) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1374.266031] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1374.546640] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773685, 'name': ReconfigVM_Task, 'duration_secs': 0.285104} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.546921] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Reconfigured VM instance instance-00000014 to attach disk [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1374.547590] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de624123-bf3e-4460-b2e9-f8990944fdae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.553939] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1374.553939] env[63371]: value = "task-1773686" [ 1374.553939] env[63371]: _type = "Task" [ 1374.553939] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.562925] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773686, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.701818] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1374.770862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.801s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.771387] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.774083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.320s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.775468] env[63371]: INFO nova.compute.claims [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.985541] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", "ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.021052] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": 
"3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.064685] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773686, 'name': Rename_Task, 'duration_secs': 0.160065} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.064955] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.065223] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b4e848-aa49-408f-a6a8-dcf25b060170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.071447] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1375.071447] env[63371]: value = "task-1773687" [ 1375.071447] env[63371]: _type = "Task" [ 1375.071447] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.081341] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773687, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.280170] env[63371]: DEBUG nova.compute.utils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.283454] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1375.284882] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1375.332443] env[63371]: DEBUG nova.policy [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9adda22338e04c6da4b1d87790d42ebc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8126cc358323499680ab7423d7b6ce0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.489295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.489295] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance network_info: |[{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", 
"ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1375.489592] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:6d:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '225db88c-9e6c-40e6-a30e-a3830f2c411c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.498143] env[63371]: DEBUG oslo.service.loopingcall [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.498568] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.498846] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-996aa210-d2a2-4a40-96a0-8958dc3dff28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.519109] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.519109] env[63371]: value = "task-1773688" [ 1375.519109] env[63371]: _type = "Task" [ 1375.519109] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.523191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.529818] env[63371]: DEBUG nova.compute.manager [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received event network-changed-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1375.529818] env[63371]: DEBUG nova.compute.manager [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Refreshing instance network info cache due to event network-changed-225db88c-9e6c-40e6-a30e-a3830f2c411c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1375.529818] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Acquiring lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.529818] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Acquired lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.529818] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Refreshing network info cache for port 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1375.535773] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773688, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.585624] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773687, 'name': PowerOnVM_Task, 'duration_secs': 0.409806} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.585893] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1375.586204] env[63371]: INFO nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 7.49 seconds to spawn the instance on the hypervisor. 
[ 1375.586410] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1375.587327] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea1b242-8bd8-4826-a803-31ed9cd090fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.639229] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Successfully created port: fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.784450] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1376.030941] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773688, 'name': CreateVM_Task, 'duration_secs': 0.358494} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.031214] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.031975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.031975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.032298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.032548] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a6c450a-210c-449e-9dbf-f95fac3bb573 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.045778] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.045778] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd4ee2-3f7e-7c63-7bae-f2f034a82406" [ 1376.045778] env[63371]: _type = "Task" [ 1376.045778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.054758] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd4ee2-3f7e-7c63-7bae-f2f034a82406, 'name': SearchDatastore_Task, 'duration_secs': 0.009746} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.055059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.055432] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.055508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.055650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.055811] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.056068] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b749bdd-ecf1-414c-b4dc-65e9210cfacb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.059012] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e198222-2520-458c-8051-64405961b18c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.084143] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8117bad0-c5c5-49cc-a134-5ecf7df404a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.086515] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.086729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1376.087731] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d6c25fc-df95-4784-becd-0e6609ca4134 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.095313] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.095313] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ddd1c8-5ac0-0935-c61e-fb330c939864" [ 1376.095313] env[63371]: _type = "Task" [ 1376.095313] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.098263] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1376.113185] env[63371]: INFO nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 39.37 seconds to build instance. [ 1376.118291] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ddd1c8-5ac0-0935-c61e-fb330c939864, 'name': SearchDatastore_Task, 'duration_secs': 0.008507} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.119089] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c8707a3-b0bf-45b4-a2e5-e619cd602245 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.124307] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.124307] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294" [ 1376.124307] env[63371]: _type = "Task" [ 1376.124307] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.139459] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.324230] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updated VIF entry in instance network info cache for port 225db88c-9e6c-40e6-a30e-a3830f2c411c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.324410] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", "ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.391455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8400418-8101-469c-8e46-e8a7f5d4a28b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.399355] env[63371]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64c9f14-c93d-41c5-b2a1-9fd0e652502a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.429607] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fb54c0-04e6-49a7-b9bd-4d8ebd68048e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.437088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc255fd-5510-4344-bfab-e1bd8070555d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.450762] env[63371]: DEBUG nova.compute.provider_tree [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.613524] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1376.613879] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb7a82b2-d8aa-4ee2-9889-712ee32f32c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.616358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.771s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.623549] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1376.623549] env[63371]: value = "task-1773689" [ 1376.623549] env[63371]: _type = "Task" [ 1376.623549] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.635891] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.639010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.639274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1376.639569] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.639783] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7846f6e4-f035-4696-8474-4ccc592a7e51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.646168] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.646168] env[63371]: value = "task-1773690" [ 1376.646168] env[63371]: _type = "Task" [ 1376.646168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.656470] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.799565] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1376.824595] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.824863] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.825023] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.825200] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.825385] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.825563] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.825782] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.825996] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.826234] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.826433] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.826652] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.827243] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Releasing lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.829013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ea9020-0152-4696-a4fa-cd2becfef1f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.837200] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2c3904-7781-4a75-85cd-e19f7d95575c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.954033] env[63371]: DEBUG nova.scheduler.client.report [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1377.119457] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1377.141167] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.159514] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.248952] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Successfully updated port: fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1377.459770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.460325] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1377.462915] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.050s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.464372] env[63371]: INFO nova.compute.claims [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.552914] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.553173] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.553390] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.553564] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.553739] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] No waiting events found dispatching network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1377.553960] env[63371]: WARNING nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received unexpected event network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de for instance with vm_state building and task_state spawning. [ 1377.554240] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.554429] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1377.554632] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.554768] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.554920] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1377.644718] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task, 'duration_secs': 0.788538} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.644718] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.644895] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1377.653815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.661882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.662125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.662333] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515289} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.662547] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1377.662747] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1377.662993] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-834c27e8-ca6c-40be-9a5d-b700d5c5d325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.670489] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1377.670489] env[63371]: value = "task-1773691" [ 1377.670489] env[63371]: _type = "Task" [ 1377.670489] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.681255] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.750558] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.968401] env[63371]: DEBUG nova.compute.utils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.972197] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1377.972371] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1378.011337] env[63371]: DEBUG nova.policy [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7a7343e28c34bdbb36d36ef413a1968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f4bffbe32a94e19a1dc4562f925ca9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1378.108980] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1378.187012] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.187301] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1378.190135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099d893d-193e-44bb-9999-93fb6d712939 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.215885] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.216105] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b20fc6c-9f4f-4ce7-958e-efab13813338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.237520] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1378.237520] env[63371]: value = "task-1773692" [ 1378.237520] env[63371]: _type = "Task" [ 1378.237520] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.246323] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773692, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.277947] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.410523] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Successfully created port: 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1378.479467] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1378.750125] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773692, 'name': ReconfigVM_Task, 'duration_secs': 0.392522} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.750125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.750459] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d93fe62-269d-4b02-a3cd-0289b1840186 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.760041] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1378.760041] env[63371]: value = "task-1773693" [ 1378.760041] env[63371]: _type = "Task" [ 1378.760041] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.768471] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773693, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.780473] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.784544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.784544] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1379.031521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1022f47-b004-4db4-a744-a7bfaf7a2e50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.039412] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a1060f-6608-4b22-bcf8-dff5312f06b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.069063] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae0cfc5-3277-483a-972b-5f38a52c1c12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.076017] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d24010-c99d-486d-8988-90d365f43c63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.088810] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.272240] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773693, 'name': Rename_Task, 'duration_secs': 0.186499} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.272696] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1379.273084] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e569e92-a954-4326-b6d9-d5b23daa656a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.281168] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1379.281168] env[63371]: value = "task-1773694" [ 1379.281168] env[63371]: _type = "Task" [ 1379.281168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.295607] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.320969] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.461346] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.490250] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1379.526711] env[63371]: DEBUG 
nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1379.526711] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1379.526711] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1379.527543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98f3e7c-1273-4963-b98c-af3712cf92cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.535977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d1f10a-753a-422a-9ff3-50aea1b0dbfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.611473] env[63371]: ERROR nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [req-160e8745-431f-4e3d-80c8-52e433163f60] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-160e8745-431f-4e3d-80c8-52e433163f60"}]} [ 1379.630469] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1379.646083] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1379.646532] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.659187] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1379.683790] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1379.799467] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.956907] env[63371]: DEBUG nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.957148] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.957351] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.957791] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.958020] env[63371]: DEBUG nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] No waiting events found dispatching network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1379.958196] env[63371]: WARNING nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received unexpected event network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a for instance with vm_state building and task_state spawning. 
[ 1379.967927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.968283] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance network_info: |[{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1379.968701] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:18:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbd3a7d0-068b-4df5-be7f-d8bf5fe260de', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.976809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating folder: Project (8126cc358323499680ab7423d7b6ce0d). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.979959] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b289688a-afc8-4d5e-b2af-5141a91d6025 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.990743] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created folder: Project (8126cc358323499680ab7423d7b6ce0d) in parent group-v368199. 
[ 1379.990943] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating folder: Instances. Parent ref: group-v368262. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.991189] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-397c9165-f4f8-4975-aa05-dc38c22b97a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.002297] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created folder: Instances in parent group-v368262. [ 1380.002537] env[63371]: DEBUG oslo.service.loopingcall [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.002721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.002925] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a134ed5c-a64d-4feb-89e8-e382b937dd44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.031445] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.031445] env[63371]: value = "task-1773697" [ 1380.031445] env[63371]: _type = "Task" [ 1380.031445] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.042968] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.043875] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Successfully updated port: 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.126125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.126125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.126125] env[63371]: DEBUG nova.compute.manager [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Going to confirm migration 1 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1380.273740] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc90e5a-6c81-48ce-964d-3a7736702eaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.281346] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6244ee51-ee36-4c97-ac34-9363dc6cced9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.292700] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task, 'duration_secs': 0.8812} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.320991] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1380.321621] env[63371]: INFO nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 7.32 seconds to spawn the instance on the hypervisor. 
[ 1380.321870] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1380.323301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287fd274-70da-4130-a271-906ac9958af9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.326571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a7535c-cdfc-40c4-b52d-c1d6e8556bf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.335567] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de12ba3-18b0-46e1-aaf2-1f0879f9463e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.351973] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.541666] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.551490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.551676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.552026] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.719385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.719651] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.719799] env[63371]: DEBUG nova.network.neutron [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.719984] env[63371]: DEBUG nova.objects.instance [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'info_cache' on Instance uuid f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.850700] env[63371]: INFO nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 42.25 seconds to build instance. 
[ 1380.884857] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1380.885137] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 57 to 58 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1380.885561] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1381.044051] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task, 'duration_secs': 0.563332} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.044235] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.044919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.045091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.045415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1381.045668] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6393ae14-a05f-46bc-80ff-b1917e9ddc94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.050328] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1381.050328] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c" [ 1381.050328] env[63371]: _type = "Task" [ 1381.050328] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.059021] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.087106] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1381.244182] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.352092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.135s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.390032] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.927s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.390449] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1381.393518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.769s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.394981] env[63371]: INFO nova.compute.claims [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1381.562928] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c, 'name': SearchDatastore_Task, 'duration_secs': 0.024273} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.567017] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.567347] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.567347] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e1425632-9d78-4910-bb02-ee6ee5b0329c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.574453] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.574632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1381.575372] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a03bbb-935d-4f1a-b320-bbf37a2937cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.582654] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1381.582654] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d" [ 1381.582654] env[63371]: _type = "Task" [ 1381.582654] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.590423] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.750668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.750931] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance network_info: |[{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1381.751356] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:51:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62ec6b3f-aa36-49ba-ab5a-ce568c16837a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1381.759782] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating folder: Project (6f4bffbe32a94e19a1dc4562f925ca9b). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.760067] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bc4661b-a48f-4223-943d-9bdf6403e55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.771594] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created folder: Project (6f4bffbe32a94e19a1dc4562f925ca9b) in parent group-v368199. [ 1381.771822] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating folder: Instances. Parent ref: group-v368265. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.774254] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c1caa3b-c259-47bd-9f0d-06333e5ac019 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.783629] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created folder: Instances in parent group-v368265. [ 1381.783809] env[63371]: DEBUG oslo.service.loopingcall [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.784031] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1381.784635] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30fbd51c-c5c3-4520-9001-367d4ab0bbb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.803047] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.803047] env[63371]: value = "task-1773700" [ 1381.803047] env[63371]: _type = "Task" [ 1381.803047] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.811020] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773700, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.857021] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.899506] env[63371]: DEBUG nova.compute.utils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1381.903831] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1381.903831] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.003268] env[63371]: DEBUG nova.policy [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cb0ef460c0e47ff89cf2b16c6e61933', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dea4a97b99c84f03a6098b321932dc8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1382.025789] env[63371]: DEBUG nova.compute.manager [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-changed-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.026461] env[63371]: DEBUG nova.compute.manager [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Refreshing instance network info cache due to event network-changed-62ec6b3f-aa36-49ba-ab5a-ce568c16837a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1382.026723] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Acquiring lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.026873] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Acquired lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.027049] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Refreshing network info cache for port 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1382.044951] env[63371]: DEBUG nova.network.neutron [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.095564] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d, 'name': SearchDatastore_Task, 'duration_secs': 0.009351} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.101470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.101704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.101887] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9741dea6-9022-45ce-8731-8098b1949d4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.107769] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1382.107769] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad" [ 1382.107769] env[63371]: _type = "Task" [ 1382.107769] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.116320] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.312896] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773700, 'name': CreateVM_Task, 'duration_secs': 0.392761} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.313085] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1382.313760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.313921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.314267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1382.315052] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Successfully created port: 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1382.316733] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534a5b3e-5786-46f3-8f2f-905e6266bc00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.321930] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1382.321930] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269" [ 1382.321930] env[63371]: _type = "Task" [ 1382.321930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.329783] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.378991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.403797] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1382.547289] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.547646] env[63371]: DEBUG nova.objects.instance [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'migration_context' on Instance uuid f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1382.625920] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.626229] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.626486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1382.626742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-379853ac-ad00-4ea9-8b42-9a998e632deb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.636783] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1382.636783] env[63371]: value = "task-1773701" [ 1382.636783] env[63371]: _type = "Task" [ 1382.636783] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.649799] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.832792] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269, 'name': SearchDatastore_Task, 'duration_secs': 0.008608} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.837830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.838120] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.838358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.838504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.838678] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1382.839218] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-049e94b9-6164-4ec3-a856-859d1ceeab28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.855185] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1382.855387] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1382.856188] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227c0cfc-3e82-487b-8d12-cbf6b9f6109b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.860970] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updated VIF entry in instance network info cache for port 62ec6b3f-aa36-49ba-ab5a-ce568c16837a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.861341] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.867480] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1382.867480] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571" [ 1382.867480] env[63371]: _type = "Task" [ 1382.867480] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.877974] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.050267] env[63371]: DEBUG nova.objects.base [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1383.051225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92959437-188b-444c-99e6-55c89137780b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.054920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7716f675-da37-4f55-b19e-ae975ade4322 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.079780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da375159-aa48-4a6c-b632-31c269875a5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.082592] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f493ed0b-9eb3-4eda-8c99-4bb684d565b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.088958] env[63371]: DEBUG oslo_vmware.api [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1383.088958] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfbd82-e746-7d20-5ad2-709af40da65e" [ 1383.088958] env[63371]: _type = "Task" [ 1383.088958] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.118315] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19d7611-3478-4b36-aa84-a98b41638d83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.128851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ad61c9-9861-4923-82dc-2ccb3d7ba51a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.135445] env[63371]: DEBUG oslo_vmware.api [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfbd82-e746-7d20-5ad2-709af40da65e, 'name': SearchDatastore_Task, 'duration_secs': 0.030788} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.135445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.145008] env[63371]: DEBUG nova.compute.provider_tree [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.154516] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.154829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1383.154985] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1383.155263] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8fdc0aa-609c-4dfd-9a61-7d60721890af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.161702] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1383.161702] env[63371]: value = "task-1773702" [ 1383.161702] env[63371]: _type = "Task" [ 1383.161702] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.172056] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.369056] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Releasing lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.379720] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571, 'name': SearchDatastore_Task, 'duration_secs': 0.056461} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.380609] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a9d415-5dba-480a-a598-f6017539cfcb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.386337] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1383.386337] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232" [ 1383.386337] env[63371]: _type = "Task" [ 1383.386337] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.396671] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.418428] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1383.446172] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.446422] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.446575] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.446756] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.446903] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.447070] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.447253] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.447405] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.447645] env[63371]: DEBUG nova.virt.hardware [None 
req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.447809] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.447973] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.448836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353c4a01-6bf2-44be-b297-c0bc6043d1d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.456742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335324ef-0771-4f67-9910-21f69cf8edab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.650939] env[63371]: DEBUG nova.scheduler.client.report [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1383.671895] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074267} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.672127] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1383.672916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35de7df-6c44-4376-82ac-4448e4237c1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.696987] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.697577] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a5ec5d3-b89d-4f71-92ae-3b1cd90ebcc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.718456] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1383.718456] env[63371]: value = "task-1773703" [ 1383.718456] env[63371]: _type = "Task" [ 1383.718456] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.727893] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.896655] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232, 'name': SearchDatastore_Task, 'duration_secs': 0.017155} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.896923] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.897192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1383.897949] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83b9189a-270b-4a4a-a9a0-2d2588abaea8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.904026] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Successfully updated port: 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.906793] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1383.906793] env[63371]: value = "task-1773704" [ 1383.906793] env[63371]: _type = "Task" [ 1383.906793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.914097] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.118234] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.118469] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.118697] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.118899] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.119075] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] No waiting events found dispatching network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1384.119261] env[63371]: WARNING nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received unexpected event network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 for instance with vm_state building and task_state spawning. [ 1384.119382] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-changed-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.119528] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Refreshing instance network info cache due to event network-changed-00f821d3-2f0a-46f0-9551-f7eefb581c66. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1384.119747] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquiring lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.119880] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquired lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.120062] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Refreshing network info cache for port 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1384.155890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.762s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.156403] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1384.160450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.891s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.160648] env[63371]: DEBUG nova.objects.instance [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lazy-loading 'resources' on Instance uuid a43fed87-5205-4148-834e-66778a90b7bc {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1384.228662] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.406936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.419176] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.652843] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1384.664424] env[63371]: DEBUG nova.compute.utils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1384.665808] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1384.665908] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1384.731305] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task, 'duration_secs': 0.905326} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.731305] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.734030] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cf6b472-9cf3-4a6b-81c1-dd343456589d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.739365] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1384.739365] env[63371]: value = "task-1773705" [ 1384.739365] env[63371]: _type = "Task" [ 1384.739365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.748402] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773705, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.787326] env[63371]: DEBUG nova.policy [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a258c38635014fdf9c6e3907bda2fd03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a884a9d1a3ae410b858851431c166183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1384.827587] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.920595] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706564} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.920900] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1384.921344] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1384.921642] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e674894c-792c-4a38-b50c-0cd5cdbfe1e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.930455] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1384.930455] env[63371]: value = "task-1773706" [ 1384.930455] env[63371]: _type = "Task" [ 1384.930455] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.942125] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.172578] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1385.252771] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773705, 'name': Rename_Task, 'duration_secs': 0.221526} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.252771] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1385.252771] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7e51c8e-a3c6-4ba9-9438-81c347b168f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.256874] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Successfully created port: 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.265185] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1385.265185] env[63371]: value = "task-1773707" [ 1385.265185] env[63371]: _type = "Task" [ 1385.265185] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.277197] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.281037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f61e4b-76cb-42d8-9504-c3f9837916bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.291021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ee3973-e752-490e-bbc0-c7022c58596b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.321954] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748d7296-ef5f-4d6f-a188-00414fbe3144 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.328900] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Releasing lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.330276] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b853753-dcac-4b20-8ab9-529b7167d978 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.334164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.334327] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.345397] env[63371]: DEBUG nova.compute.provider_tree [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.440861] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069056} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.441285] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1385.442124] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239baa92-9a9a-44b0-a050-5b506666b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.464531] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1385.464892] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55b398fa-252e-40ea-85bc-e7fadce0c885 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.484214] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1385.484214] env[63371]: value = "task-1773708" [ 1385.484214] env[63371]: _type = "Task" [ 1385.484214] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.493780] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.775300] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.850251] env[63371]: DEBUG nova.scheduler.client.report [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1385.876066] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.994794] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773708, 'name': ReconfigVM_Task, 'duration_secs': 0.454287} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.995079] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Reconfigured VM instance instance-00000017 to attach disk [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1385.995759] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39c3d55f-99e1-47f7-8ff1-828e5ada22de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.002799] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1386.002799] env[63371]: value = "task-1773709" [ 1386.002799] env[63371]: _type = "Task" [ 1386.002799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.014597] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773709, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.071382] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [{"id": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "address": "fa:16:3e:f0:75:64", "network": {"id": "19de600e-a1b8-4d10-9f47-0d72c8817654", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-764261102-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dea4a97b99c84f03a6098b321932dc8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f821d3-2f", "ovs_interfaceid": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.185951] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1386.219965] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1386.220133] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1386.220244] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.220563] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1386.220563] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.220685] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1386.222012] 
env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1386.222442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c094e4-58bd-4e29-b71a-ede3a565487b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.230329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8212f371-1b32-43a3-b523-2c41708700df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.278155] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task, 'duration_secs': 1.011968} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.278155] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1386.278155] env[63371]: INFO nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 9.48 seconds to spawn the instance on the hypervisor. 
[ 1386.278155] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1386.278628] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fafa185-f6f2-4498-bc2a-d5c26750d70e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.355882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.358834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.358834] env[63371]: DEBUG nova.objects.instance [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lazy-loading 'resources' on Instance uuid 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1386.382115] env[63371]: INFO nova.scheduler.client.report [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleted allocations for instance a43fed87-5205-4148-834e-66778a90b7bc [ 1386.513227] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773709, 'name': Rename_Task, 'duration_secs': 0.136223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.513563] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1386.513846] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63cb267f-0aad-4105-90fc-4830d5c771d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.520108] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1386.520108] env[63371]: value = "task-1773710" [ 1386.520108] env[63371]: _type = "Task" [ 1386.520108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.528339] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.571211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.571577] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance network_info: |[{"id": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "address": "fa:16:3e:f0:75:64", "network": {"id": "19de600e-a1b8-4d10-9f47-0d72c8817654", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-764261102-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dea4a97b99c84f03a6098b321932dc8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f821d3-2f", "ovs_interfaceid": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1386.572031] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:75:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00f821d3-2f0a-46f0-9551-f7eefb581c66', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.581170] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating folder: Project (dea4a97b99c84f03a6098b321932dc8f). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.581864] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a12f2c70-51ae-44ba-ad46-7b6295d44637 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.592487] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created folder: Project (dea4a97b99c84f03a6098b321932dc8f) in parent group-v368199. [ 1386.593017] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating folder: Instances. Parent ref: group-v368268. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.593017] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-392fc642-e09f-478e-b828-8fc0f826a1a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.602563] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created folder: Instances in parent group-v368268. [ 1386.602822] env[63371]: DEBUG oslo.service.loopingcall [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.603237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.603471] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ec37b25-a333-48c6-868e-9f5398a636b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.625352] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.625352] env[63371]: value = "task-1773713" [ 1386.625352] env[63371]: _type = "Task" [ 1386.625352] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.633465] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773713, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.795549] env[63371]: INFO nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 47.12 seconds to build instance. [ 1386.867873] env[63371]: DEBUG nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1386.868104] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.868309] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.868472] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.868643] env[63371]: DEBUG nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] No waiting events found dispatching network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1386.868793] env[63371]: WARNING nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received unexpected event network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 for instance with vm_state building and task_state spawning. 
[ 1386.891249] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.707s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.034760] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.082188] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Successfully updated port: 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1387.136117] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773713, 'name': CreateVM_Task, 'duration_secs': 0.395743} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.138719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.139696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.140015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.141377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.141377] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50bf8dce-7d97-4c62-b65f-6659209fc7f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.146027] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1387.146027] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75" [ 1387.146027] env[63371]: _type = "Task" [ 1387.146027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.155291] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.298292] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.349s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.481181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08755a23-bc05-4d17-86c7-8da3b5696d2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.488576] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c311c8-a211-4af6-aa01-6864a9c239cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.520562] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6225d00-157e-423f-acac-d60f3dfb4a01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.534310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513dea5a-385a-43c2-9894-75ac40eaccbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.537548] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task, 'duration_secs': 0.618872} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.539019] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1387.539019] env[63371]: INFO nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 8.05 seconds to spawn the instance on the hypervisor. 
[ 1387.539019] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1387.539341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519e27c1-50a3-49f5-8ed1-ba998095e05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.551814] env[63371]: DEBUG nova.compute.provider_tree [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.586873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.586873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.586873] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.667164] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75, 'name': SearchDatastore_Task, 'duration_secs': 0.013742} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.667164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.667164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.667164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.667451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.667451] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.667632] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5f53aed-7da8-434a-9afa-46b4a5666652 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.682337] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.682623] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.683357] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f912b7-8802-4089-97ce-bdcf5c050caf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.688799] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1387.688799] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64" [ 1387.688799] env[63371]: _type = "Task" [ 1387.688799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.697166] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.800221] env[63371]: INFO nova.compute.manager [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Rescuing [ 1387.800788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.800959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.801177] env[63371]: DEBUG nova.network.neutron [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.802488] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1388.057515] env[63371]: DEBUG nova.scheduler.client.report [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1388.069392] env[63371]: INFO nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 45.65 seconds to build instance. [ 1388.127386] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1388.212043] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64, 'name': SearchDatastore_Task, 'duration_secs': 0.026288} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.213392] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc575a20-c1c5-4337-8761-35a99cc243da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.224256] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1388.224256] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe" [ 1388.224256] env[63371]: _type = "Task" [ 1388.224256] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.236222] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.329091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.351435] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.567754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.209s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.572596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.549s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.575109] env[63371]: INFO nova.compute.claims [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.579116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.706s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.597392] env[63371]: INFO nova.scheduler.client.report [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleted allocations for instance 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d [ 1388.700257] env[63371]: DEBUG nova.network.neutron [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.738268] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe, 'name': SearchDatastore_Task, 'duration_secs': 0.015793} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.738268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.738268] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.738268] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58f0be02-5761-4174-b054-aac7d406d5f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.746633] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1388.746633] env[63371]: value = "task-1773714" [ 1388.746633] env[63371]: _type = "Task" [ 1388.746633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.758434] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.853591] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.854020] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance network_info: |[{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1388.854495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:0c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c5c963f-1c9c-4d03-bb01-5670b9fe06b4', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1388.862753] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating folder: Project (a884a9d1a3ae410b858851431c166183). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.863460] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e5c94c4-fcde-411e-9ed5-b0e214e4fdc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.874404] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created folder: Project (a884a9d1a3ae410b858851431c166183) in parent group-v368199. [ 1388.875812] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating folder: Instances. Parent ref: group-v368271. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.876164] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfad2b05-2aa4-448b-8d6d-442045f9507c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.886621] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created folder: Instances in parent group-v368271. [ 1388.886621] env[63371]: DEBUG oslo.service.loopingcall [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1388.886807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1388.887021] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1cc79c8-482a-4264-a99f-fd99725b5beb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.906463] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1388.906463] env[63371]: value = "task-1773717" [ 1388.906463] env[63371]: _type = "Task" [ 1388.906463] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.916118] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.083615] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1389.114026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.347s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.203241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.258760] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.280295] env[63371]: DEBUG nova.compute.manager [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-changed-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1389.280295] env[63371]: DEBUG nova.compute.manager [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Refreshing instance network info cache due to event network-changed-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1389.280295] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Acquiring lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.280852] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Acquired lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.280852] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Refreshing network info cache for port 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1389.282436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.282643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.282932] env[63371]: DEBUG nova.objects.instance [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lazy-loading 'flavor' on Instance uuid af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.416860] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.616884] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.743116] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1389.743413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a401458-14eb-403a-9427-b2603e0194bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.758853] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1389.758853] env[63371]: value = "task-1773718" [ 1389.758853] env[63371]: _type = "Task" [ 1389.758853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.767258] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534661} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.768048] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.768279] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.770887] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b1cadb7-017c-41bf-a6a5-7fc100773278 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.776627] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773718, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.781930] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1389.781930] env[63371]: value = "task-1773719" [ 1389.781930] env[63371]: _type = "Task" [ 1389.781930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.793634] env[63371]: DEBUG nova.objects.instance [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lazy-loading 'pci_requests' on Instance uuid af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.800339] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.918771] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task, 'duration_secs': 0.797774} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.919146] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.919960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.920372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.920709] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.921097] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719d3ee0-f1bb-419d-b1c0-c4e84d9a7046 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.930112] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1389.930112] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede" [ 1389.930112] env[63371]: _type = "Task" [ 1389.930112] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.936749] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.015188] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updated VIF entry in instance network info cache for port 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1390.015539] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.183135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a86db3-efdb-4349-8383-8bbfa90cd712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.191854] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d6a272-2d1f-439b-9d2e-1b4f1b1864ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.229171] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9eb569f-359e-4931-9afd-d984a79aba5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.238056] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4f6b6ebb-524b-497a-97c6-42e29c005c66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.254437] env[63371]: DEBUG nova.compute.provider_tree [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.268949] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773718, 'name': PowerOffVM_Task, 'duration_secs': 0.245299} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.269212] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1390.270018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159754af-0c86-4fd4-a857-3912d02c7d1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.293496] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aceb2038-cfb4-493b-add3-e83e7d4e86c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.298069] env[63371]: DEBUG nova.objects.base [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1390.298069] env[63371]: DEBUG nova.network.neutron [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1390.302998] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.305309] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.307798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64cb22d-dfbe-4792-b457-8e575356e120 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.330210] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.330505] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dc18440-b284-4931-9097-c64e692e062e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.352511] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1390.352511] env[63371]: value = "task-1773720" [ 1390.352511] env[63371]: _type = "Task" [ 1390.352511] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.354371] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1390.354693] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d4e865e-9e89-4bdc-8c6f-689caf5a6aaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.365896] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.367157] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.367157] env[63371]: value = "task-1773721" [ 1390.367157] env[63371]: _type = "Task" [ 1390.367157] env[63371]: } to complete. 
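The repeated "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" entries around this point are oslo.vmware polling a vCenter task until it reaches a terminal state. A minimal sketch of that polling shape, assuming a caller-supplied get_task_info callable; TaskFailed, poll_interval and the dict layout are illustrative stand-ins, not the real oslo.vmware API:

import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails."""
    while True:
        info = get_task_info()            # one property-collector round trip
        if info["state"] == "success":
            return info                   # duration ends up in the log line
        if info["state"] == "error":
            raise TaskFailed(info.get("error"))
        # still queued/running: this is where the "progress is 0%" ...
        # "progress is 89%" entries come from
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)
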
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.376237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1390.376552] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.376844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.377039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.377255] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.377532] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e34530bb-c18a-412a-a86f-bf8ce2b9c66c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.384996] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.385439] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.385928] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1f1c36-05f6-44b4-962f-38dbaee14679 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.391921] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.391921] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c" [ 1390.391921] env[63371]: _type = "Task" [ 1390.391921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.398781] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.427811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.145s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.439498] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede, 'name': SearchDatastore_Task, 'duration_secs': 0.018911} completed successfully. 
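The lock/MakeDirectory/SearchDatastore_Task sequence above for [datastore1] devstack-image-cache_base is the fetch-image-if-missing pattern: the cached base-image path doubles as a lock name, so only one request copies the image into the cache while the others wait and then reuse it. A minimal sketch of that shape against a local filesystem cache; ensure_cached_image and fetch_image are illustrative helpers rather than Nova's real ones, and only lockutils.lock is the actual oslo.concurrency call:

import os

from oslo_concurrency import lockutils


def ensure_cached_image(cache_dir, image_id, fetch_image):
    """Return the cached image path, downloading the image at most once."""
    path = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    with lockutils.lock(path):            # one named lock per cached image
        if not os.path.exists(path):      # analogue of the SearchDatastore probe
            os.makedirs(os.path.dirname(path), exist_ok=True)
            fetch_image(image_id, path)
        return path
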
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.440478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.440759] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.441085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.518061] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Releasing lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.758914] env[63371]: DEBUG nova.scheduler.client.report [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1390.864394] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.903055] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c, 'name': SearchDatastore_Task, 'duration_secs': 0.041584} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.903780] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4a5eed4-4915-40ba-a7ef-9a84dac974be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.911129] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.911129] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929" [ 1390.911129] env[63371]: _type = "Task" [ 1390.911129] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.924042] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.267992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.268421] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1391.276438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.667s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.276438] env[63371]: INFO nova.compute.claims [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.364241] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task, 'duration_secs': 0.791229} completed successfully. 
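The inventory record logged for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 translates into schedulable capacity as (total - reserved) * allocation_ratio, which is what claims like the one above are checked against. A small worked example using the exact numbers from the log; effective_capacity is an illustrative helper, not the Placement implementation:

def effective_capacity(total, reserved, allocation_ratio):
    return int((total - reserved) * allocation_ratio)


inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 400
    print(rc, effective_capacity(inv["total"], inv["reserved"],
                                 inv["allocation_ratio"]))
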
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.364401] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.365417] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac784563-a876-4237-a37e-199a6ea68ab2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.373871] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1391.373871] env[63371]: value = "task-1773722" [ 1391.373871] env[63371]: _type = "Task" [ 1391.373871] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.382683] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773722, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.423034] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929, 'name': SearchDatastore_Task, 'duration_secs': 0.034389} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.423437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.423720] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. 
{{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1391.424064] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.424308] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.424592] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a172851f-04a2-417c-a097-0ecf3e2d7091 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.427408] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22829eb9-2e65-4cb9-92c5-1aa4bead7f11 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.435533] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1391.435533] env[63371]: value = "task-1773723" [ 1391.435533] env[63371]: _type = "Task" [ 1391.435533] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.436914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.437643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1391.445815] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687950c9-5ce3-412e-a938-64576b8ade64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.458009] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1391.458009] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733" [ 1391.458009] env[63371]: _type = "Task" [ 1391.458009] env[63371]: } to complete. 
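Every datastore reference above ("[datastore1] devstack-image-cache_base/<image>/<image>.vmdk", "[datastore1] <instance>/<image>-rescue.vmdk") follows vSphere's "[<datastore>] <relative path>" convention. A tiny illustrative helper showing how such strings are assembled (not Nova's ds_util code):

def ds_path(datastore, *parts):
    """Format a vSphere-style datastore path: "[ds] a/b/c"."""
    return "[%s] %s" % (datastore, "/".join(parts))


image = "1aeb47a7-4e18-481d-b3c0-d33e8c7839d9"
print(ds_path("datastore1", "devstack-image-cache_base", image, image + ".vmdk"))
print(ds_path("datastore1", "713dfaf5-d11f-4af2-af92-66a596b0ed4a",
              image + "-rescue.vmdk"))
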
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.458343] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.467482] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.783279] env[63371]: DEBUG nova.compute.utils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.788146] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1391.788146] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.888957] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773722, 'name': Rename_Task, 'duration_secs': 0.145421} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.889342] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.890916] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29bef8a8-342d-4aa6-8964-f441e8fbad28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.899869] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1391.899869] env[63371]: value = "task-1773724" [ 1391.899869] env[63371]: _type = "Task" [ 1391.899869] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.909690] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.922809] env[63371]: DEBUG nova.policy [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15eb038ed0dc4c9d9f948d154c244a32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf723c611d61478cbb81b2bc474a74f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.950539] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.970381] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733, 'name': SearchDatastore_Task, 'duration_secs': 0.019036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.971383] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-035ac7b3-9b0b-48a8-895f-59c12287b15f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.976557] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1391.976557] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5270d4b2-80d2-d1d6-f862-4fc90e819147" [ 1391.976557] env[63371]: _type = "Task" [ 1391.976557] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.995162] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270d4b2-80d2-d1d6-f862-4fc90e819147, 'name': SearchDatastore_Task, 'duration_secs': 0.010898} completed successfully. 
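The "Policy check for network:attach_external_network failed" entry above records a caller holding only the member and reader roles being denied a rule that is effectively reserved for admin callers. The real evaluation is done by oslo.policy against Nova's policy defaults; the dependency-free sketch below only illustrates the shape of that decision and assumes an admin-only rule:

creds = {
    "is_admin": False,
    "roles": ["member", "reader"],
    "project_id": "bf723c611d61478cbb81b2bc474a74f4",
}


def check_attach_external_network(creds):
    # Assumption: the rule effectively requires an admin caller, so the
    # member/reader credentials from the log fail the check.
    return bool(creds.get("is_admin"))


print(check_attach_external_network(creds))  # False, matching the log
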
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.997463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.997780] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1391.998039] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5aafbd75-a313-4e22-8027-a7ac4dae1c79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.005874] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1392.005874] env[63371]: value = "task-1773725" [ 1392.005874] env[63371]: _type = "Task" [ 1392.005874] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.014994] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.288518] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1392.419026] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.452294] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531649} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.452579] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1392.453455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ad8e3e-deb0-49ab-a81e-87eb9fe43013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.487029] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1392.489279] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f85f40b-263c-4dc4-b734-8d94cd59a128 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.518916] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507467} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.518916] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Successfully created port: 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.523930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.524163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1392.524464] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1392.524464] env[63371]: value = "task-1773726" [ 1392.524464] env[63371]: _type = "Task" [ 1392.524464] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.527250] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1007a94c-a1f8-4cb0-b9a1-e3a82360c2e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.538929] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.540379] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1392.540379] env[63371]: value = "task-1773727" [ 1392.540379] env[63371]: _type = "Task" [ 1392.540379] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.558162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.558492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.558758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.559057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.559296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.562618] env[63371]: INFO nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Terminating instance [ 1392.568346] env[63371]: DEBUG nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1392.568346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.572299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2266836-a965-4df7-9bd8-01c26c711fc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.582164] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.582487] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de330ee2-9dcd-4cf1-9303-af758127574b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.588774] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1392.588774] env[63371]: value = "task-1773728" [ 1392.588774] env[63371]: _type = "Task" [ 1392.588774] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.600618] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.799708] env[63371]: INFO nova.virt.block_device [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Booting with volume 1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8 at /dev/sda [ 1392.869668] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e162c4be-5497-407b-a2f8-9ba67668ca44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.885209] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c35d95-0b92-4951-99e7-cd0260d4d1ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.911013] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task, 'duration_secs': 0.794425} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.922906] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.923165] env[63371]: INFO nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1392.923342] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1392.924371] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99baa3c-4dd2-4efd-a8f4-3f71ea81995a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.927061] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a71a912-ad2f-4407-bf08-f186cdfc1ed7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.942783] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5445c40-a30e-4d46-a7b8-7b33e12e5b80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.989925] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8716f1f3-a6e0-4988-a038-01ea7700acc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.997626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492b180e-b7a8-48b8-8dd0-82a98712add3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.012262] env[63371]: DEBUG nova.virt.block_device [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating existing volume attachment record: d0b4bf9b-f6ef-410a-a228-58c967414f22 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1393.044691] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task} progress is 14%. 
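Instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 has now gone through the full vmwareapi spawn sequence visible in the preceding entries: extend the copied root disk, reconfigure the VM to attach it, rename the VM, power it on, and report the elapsed time ("Took 9.50 seconds to spawn"). A compressed sketch of that ordering; vm and its methods are placeholders standing in for the corresponding vCenter tasks, not Nova's actual driver code:

import time


def spawn(vm, root_disk, root_gb, wait_for_task):
    start = time.monotonic()
    wait_for_task(vm.extend_virtual_disk(root_disk, root_gb))  # ExtendVirtualDisk_Task
    wait_for_task(vm.reconfig_attach_disk(root_disk))          # ReconfigVM_Task
    wait_for_task(vm.rename(vm.instance_uuid))                 # Rename_Task
    wait_for_task(vm.power_on())                               # PowerOnVM_Task
    print("Took %.2f seconds to spawn the instance on the hypervisor."
          % (time.monotonic() - start))
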
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.051266] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164337} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.051559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.052377] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53655875-5f81-402d-9427-af1163b45fdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.087197] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.091602] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bce3409-9f16-467a-b01d-a8b73db16eb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.115772] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773728, 'name': PowerOffVM_Task, 'duration_secs': 0.413569} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.117135] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.118090] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.118090] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1393.118090] env[63371]: value = "task-1773729" [ 1393.118090] env[63371]: _type = "Task" [ 1393.118090] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.118090] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8b2e74c-93a7-49ea-becb-0b736e661f7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.127706] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.235330] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f385370-5907-4aee-9528-09899162db82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.246726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a49fc80-ce3e-46bb-97dd-0b4c1561f6b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.284238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58d1835-5abb-42f1-8ab4-9c49a6d1728f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.286623] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.286843] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.287025] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleting the datastore file [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.287670] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7fea87f-8389-4d59-a122-5e3c6da3edd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.297074] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032d6084-7b4a-4037-b6ed-b198a5943f22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.300388] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1393.300388] env[63371]: value = "task-1773731" [ 1393.300388] env[63371]: _type = "Task" [ 
1393.300388] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.311840] env[63371]: DEBUG nova.compute.provider_tree [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.318240] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773731, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.452048] env[63371]: INFO nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 49.06 seconds to build instance. [ 1393.540839] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task, 'duration_secs': 0.847165} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.541191] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1393.542086] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f6f5dd-c983-48e8-b247-8ef8df036233 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.578535] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9206c9a4-1e0d-4343-a05c-ca08386470c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.597759] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1393.597759] env[63371]: value = "task-1773732" [ 1393.597759] env[63371]: _type = "Task" [ 1393.597759] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.617938] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773732, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.631150] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.819293] env[63371]: DEBUG nova.scheduler.client.report [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1393.824320] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472805} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.825351] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.825351] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1393.825521] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1393.825819] env[63371]: INFO nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1393.826215] env[63371]: DEBUG oslo.service.loopingcall [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
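The "Waiting for function ..._deallocate_network_with_retries to return" entry shows the Neutron cleanup for the terminated instance being driven through a retrying looping call, so a transient Neutron error does not leak ports. Nova drives this through oslo_service.loopingcall; the sketch below is a dependency-free illustration of the same retry shape, with illustrative names and retry limits:

import time


def deallocate_network_with_retries(deallocate, attempts=3, interval=2.0):
    """Call deallocate() until it succeeds or the attempts run out."""
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()          # e.g. neutron deallocate_for_instance()
        except Exception:
            if attempt == attempts:
                raise                    # out of retries: surface the failure
            time.sleep(interval)         # back off, then try again
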
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.826507] env[63371]: DEBUG nova.compute.manager [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1393.826662] env[63371]: DEBUG nova.network.neutron [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1393.855828] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.856313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.955527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.802s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.118530] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773732, 'name': ReconfigVM_Task, 'duration_secs': 0.373144} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.119174] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.119393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f96d445-119d-4b42-90f7-31fd5402757a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.131031] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task, 'duration_secs': 0.848276} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.132351] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.133017] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1394.133017] env[63371]: value = "task-1773733" [ 1394.133017] env[63371]: _type = "Task" [ 1394.133017] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.135148] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7701c36-df31-46b0-959d-de01424d9c7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.144170] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1394.144170] env[63371]: value = "task-1773734" [ 1394.144170] env[63371]: _type = "Task" [ 1394.144170] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.147272] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.157512] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773734, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.329026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.054s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.329026] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1394.330709] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 39.396s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.331104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.331374] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1394.331760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.128s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.334500] env[63371]: INFO nova.compute.claims [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.340179] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f04fe37-714d-4d0b-a4b6-9cb295a00cb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.355713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c19a01-5880-41b1-b7d0-8a1831835ae9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.374158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188d0a22-2d45-42ad-b6e1-331fa9f5c4e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.383015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd5a7aa-81a8-4026-ba08-530496da1f86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.423066] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180028MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1394.423487] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.459265] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1394.610322] env[63371]: DEBUG nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.610692] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.611049] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.611347] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.611916] env[63371]: DEBUG nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] No waiting events found dispatching network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1394.611916] env[63371]: WARNING nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received unexpected event network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 for instance with vm_state building and task_state block_device_mapping. [ 1394.654804] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.655713] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Successfully updated port: 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.663362] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773734, 'name': Rename_Task, 'duration_secs': 0.209442} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.663652] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.663911] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7b0e2c9-5baa-4609-9bd2-c9c534fc5a47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.672280] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1394.672280] env[63371]: value = "task-1773735" [ 1394.672280] env[63371]: _type = "Task" [ 1394.672280] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.694076] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.833044] env[63371]: DEBUG nova.compute.utils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.834748] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1394.835202] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.850218] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.850567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.851250] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.851624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.851864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.856242] env[63371]: INFO nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Terminating instance [ 1394.858098] env[63371]: DEBUG nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1394.858366] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1394.859436] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7e43e6-28a9-4b78-8859-5059c0cf120d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.867508] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1394.867752] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5a0933-4d6a-4724-bc9d-c8e0dad151c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.873812] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1394.873812] env[63371]: value = "task-1773736" [ 1394.873812] env[63371]: _type = "Task" [ 1394.873812] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.882344] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773736, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.915900] env[63371]: DEBUG nova.policy [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a258c38635014fdf9c6e3907bda2fd03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a884a9d1a3ae410b858851431c166183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1394.990380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.010959] env[63371]: DEBUG nova.network.neutron [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.152920] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task, 'duration_secs': 0.566441} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.153205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.155942] env[63371]: DEBUG nova.compute.manager [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.156924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc5769e-7f27-4136-8894-ed4f38a69a5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.161509] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.161641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.161779] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.170620] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1395.171153] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1395.171360] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1395.173189] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1395.173573] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1395.173605] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Got 1 
possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1395.173757] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1395.173938] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1395.180019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb65e65c-2239-4d10-a736-ad2e67e1106a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.194441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56385f61-9e8b-4801-9590-c0c6e9f3ff27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.198864] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.271070] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Successfully created port: 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.339020] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1395.385428] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773736, 'name': PowerOffVM_Task, 'duration_secs': 0.226973} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.388719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1395.388896] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1395.389780] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50efacd1-3794-44bd-8605-69e16e3db27b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.494655] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.495038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.495106] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleting the datastore file [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.495322] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d92000a-3a15-4088-9f96-13c0fcb2121f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.501816] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1395.501816] env[63371]: value = "task-1773738" [ 1395.501816] env[63371]: _type = "Task" [ 1395.501816] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.513869] env[63371]: INFO nova.compute.manager [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 1.69 seconds to deallocate network for instance. [ 1395.514214] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773738, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.696252] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task, 'duration_secs': 0.675943} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.697037] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.697277] env[63371]: INFO nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1395.697466] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.698347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e083cf3-5b26-4087-9754-b205a5aef8af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.773975] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.945237] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96ca776-363f-46c9-8ece-0568087bf7eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.952898] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a723c8-7492-46ef-8ad8-0aaedfbd76c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.989904] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8472344c-6910-4ad6-bbea-8fef60ad05fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.998117] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa8aa0f-763e-48d7-8a1c-ca5b21df1c36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.021493] env[63371]: DEBUG nova.compute.provider_tree [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.028742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.028742] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434547} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.028742] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.029104] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.029333] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.029538] env[63371]: INFO nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1396.029836] env[63371]: DEBUG oslo.service.loopingcall [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.030082] env[63371]: DEBUG nova.compute.manager [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1396.030306] env[63371]: DEBUG nova.network.neutron [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1396.112542] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.223375] env[63371]: INFO nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 51.63 seconds to build instance. [ 1396.347829] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1396.379958] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1396.380299] 
env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1396.380617] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1396.380941] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1396.384194] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58743bf-3c4a-415d-8de8-40e173df6405 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.391220] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01113c4e-71c1-4b0d-a176-ddfef7373e9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.528603] env[63371]: DEBUG nova.scheduler.client.report [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1396.615265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.615586] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance network_info: |[{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1396.615998] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:82:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.623810] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Creating folder: Project (bf723c611d61478cbb81b2bc474a74f4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.624151] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27128b87-b46b-4da2-ad96-3bb5cf05c617 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.639997] env[63371]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1396.640142] env[63371]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63371) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1396.640467] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Folder already exists: Project (bf723c611d61478cbb81b2bc474a74f4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1396.640659] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Creating folder: Instances. Parent ref: group-v368212. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.641136] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb3f02c3-a97d-43d4-90d7-0b830b1cc23e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.651565] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Created folder: Instances in parent group-v368212. [ 1396.651794] env[63371]: DEBUG oslo.service.loopingcall [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.652423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.652554] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49dd819b-4c8f-4095-9caf-8a287ab61859 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.676938] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.676938] env[63371]: value = "task-1773741" [ 1396.676938] env[63371]: _type = "Task" [ 1396.676938] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.687089] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773741, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.728917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-vif-deleted-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing instance network info cache due to event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.836155] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.836155] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.836285] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.984890] env[63371]: DEBUG nova.network.neutron [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.033717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.034273] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1397.038318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.797s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.038318] env[63371]: DEBUG nova.objects.instance [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lazy-loading 'resources' on Instance uuid 362d8303-524a-457a-b8d9-2bad87fa816b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.187741] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773741, 'name': CreateVM_Task, 'duration_secs': 0.352925} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.187922] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.188707] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'}, 'boot_index': 0, 'device_type': None, 'attachment_id': 'd0b4bf9b-f6ef-410a-a228-58c967414f22', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1397.188927] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Root volume attach. 
Driver type: vmdk {{(pid=63371) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1397.191095] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc58ac26-0cb6-488a-adf3-29714dc48543 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.198912] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37eb0af-4a68-447b-8457-bb013ff6186a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.205799] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c699f87-2954-4c27-bf90-3a71094e644b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.214552] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-cb8b4669-50de-4b50-b325-6077d93cd823 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.220425] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Successfully updated port: 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1397.227480] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1397.227480] env[63371]: value = "task-1773742" [ 1397.227480] env[63371]: _type = "Task" [ 1397.227480] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.232220] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1397.237882] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773742, 'name': RelocateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.290647] env[63371]: DEBUG nova.compute.manager [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.290880] env[63371]: DEBUG nova.compute.manager [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1397.291201] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.291273] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.291804] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.484596] env[63371]: INFO nova.compute.manager [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 1.45 seconds to deallocate network for instance. [ 1397.541098] env[63371]: DEBUG nova.compute.utils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.549604] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1397.549817] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1397.668900] env[63371]: DEBUG nova.policy [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b3af3bbd35846198784331994497179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '857815a7f15648948bb4ca862473ed06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1397.724991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.724991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.724991] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1397.738935] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773742, 'name': RelocateVM_Task, 'duration_secs': 0.027045} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.742279] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1397.742518] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1397.746258] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3866246e-3235-4870-a4c0-3ef8d07a24bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.751622] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updated VIF entry in instance network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.752931] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.770324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.774640] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.774810] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-vif-deleted-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.775145] env[63371]: INFO nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Neutron deleted interface 00f821d3-2f0a-46f0-9551-f7eefb581c66; detaching it from the instance and deleting it from the info cache [ 1397.775529] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.781027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31ba630-e355-4e9a-beca-d3383150fbe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.806772] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.809951] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e8b310-9928-4bc5-b669-2e498ced8a06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.832701] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1397.832701] env[63371]: value = "task-1773743" [ 1397.832701] env[63371]: _type = "Task" [ 1397.832701] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.844367] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.993536] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.052547] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1398.283529] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-567050ff-b558-4352-b40b-df495b0ff71c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.291277] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde881d4-98b4-4071-b6c1-e89c589474b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.329457] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Detach interface failed, port_id=00f821d3-2f0a-46f0-9551-f7eefb581c66, reason: Instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1398.331449] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1398.341582] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.444767] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bac1765-7ac1-4339-b1c2-26a5f6343e79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.452607] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e005f6-3c46-44bb-acb4-83cbc4fbbb88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.482568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b94bc5-0784-4bd5-8114-a7bd3cbf5e1d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.490299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a03e7b5-2aad-4056-bda8-d370718b9d47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.503452] env[63371]: DEBUG nova.compute.provider_tree [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.536353] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.536733] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.672892] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Successfully created port: 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.760115] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.843873] 
env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.006790] env[63371]: DEBUG nova.scheduler.client.report [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.039743] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.061628] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1399.093406] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1399.093656] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1399.093809] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1399.093993] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1399.094151] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1399.094298] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1399.094911] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1399.095044] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1399.095223] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1399.095417] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1399.095588] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1399.096790] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09017ddb-5e06-43dd-93a9-133b2ab4cf09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.107145] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c2c6e1-5b21-45c1-acbe-3cdaa9a8b951 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.154049] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 
dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.154292] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.154497] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.154662] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.154871] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] No waiting events found dispatching network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.154977] env[63371]: WARNING nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received unexpected event network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 for instance with vm_state building and task_state spawning. [ 1399.155141] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-changed-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.155313] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Refreshing instance network info cache due to event network-changed-912c6f7c-cc28-4f29-a362-7a8079dcc422. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1399.155516] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.262117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.262462] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance network_info: |[{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1399.262774] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquired lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.262948] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Refreshing network info cache for port 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1399.264250] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:13:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '912c6f7c-cc28-4f29-a362-7a8079dcc422', 
'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.273300] env[63371]: DEBUG oslo.service.loopingcall [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.276126] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1399.276607] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-669c0fb5-b049-4b36-bfb6-01ad3d21871f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.299208] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.299208] env[63371]: value = "task-1773744" [ 1399.299208] env[63371]: _type = "Task" [ 1399.299208] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.307701] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773744, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.343819] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task, 'duration_secs': 1.282759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.344160] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfigured VM instance instance-0000001a to attach disk [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.349305] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf866f5c-950b-45b5-91b6-5fb03379e671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.367552] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1399.367552] env[63371]: value = "task-1773745" [ 1399.367552] env[63371]: _type = "Task" [ 1399.367552] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.376247] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773745, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.512203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.475s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.514737] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.816s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.515034] env[63371]: DEBUG nova.objects.instance [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1399.547375] env[63371]: INFO nova.scheduler.client.report [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleted allocations for instance 362d8303-524a-457a-b8d9-2bad87fa816b [ 1399.563741] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updated VIF entry in instance network info cache for port 912c6f7c-cc28-4f29-a362-7a8079dcc422. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1399.564130] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.810404] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773744, 'name': CreateVM_Task, 'duration_secs': 0.339631} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.810580] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.811288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.811491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.811761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.812206] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b90f8ed-3676-465d-b312-9b9ebbd68d6d {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.816846] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1399.816846] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83" [ 1399.816846] env[63371]: _type = "Task" [ 1399.816846] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.826606] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.877163] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773745, 'name': ReconfigVM_Task, 'duration_secs': 0.140544} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.877481] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1399.878149] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3f2dd5c-e08c-4e66-86ab-016df7588bed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.885275] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1399.885275] env[63371]: value = "task-1773746" [ 1399.885275] env[63371]: _type = "Task" [ 1399.885275] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.893921] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773746, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.056156] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.757s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.066346] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Releasing lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.066618] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.066992] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.066992] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.067105] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.067267] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.162608] env[63371]: DEBUG nova.compute.manager [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.162840] env[63371]: DEBUG nova.compute.manager [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.162973] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.333287] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83, 'name': SearchDatastore_Task, 'duration_secs': 0.008709} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.333932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.333932] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.334438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.334438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.334438] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.334908] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23436697-ad0a-4a04-b99f-817bef54dd3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.353170] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.353225] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.353978] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f549f1-f850-4ebe-ad4b-ed85a96cd9b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.359483] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1400.359483] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003" [ 1400.359483] env[63371]: _type = "Task" [ 1400.359483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.367556] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.394828] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773746, 'name': Rename_Task, 'duration_secs': 0.129224} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.395160] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.397231] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfeccbff-4dee-499e-b9da-7e2c75804b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.404033] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1400.404033] env[63371]: value = "task-1773747" [ 1400.404033] env[63371]: _type = "Task" [ 1400.404033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.410313] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.513391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.513820] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.530027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.530027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.293s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.530027] env[63371]: DEBUG nova.objects.instance [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lazy-loading 'resources' on Instance uuid 7841ebd2-0c23-4e32-8b81-42311a32c6fd {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.790051] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Successfully updated port: 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1400.873416] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003, 'name': SearchDatastore_Task, 'duration_secs': 0.019746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.874218] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34855aef-3b9b-4346-928b-42cc05b08f59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.880037] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1400.880037] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b" [ 1400.880037] env[63371]: _type = "Task" [ 1400.880037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.888000] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.914761] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.054504] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.055447] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.292878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.293149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.293196] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1401.394262] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010686} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.394516] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.395065] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.397361] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa57a445-ca87-4355-91b5-cf3f9cff2fc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.403420] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1401.403420] env[63371]: value = "task-1773748" [ 1401.403420] env[63371]: _type = "Task" [ 1401.403420] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.420343] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.420600] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task, 'duration_secs': 0.641327} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.420884] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.421209] env[63371]: INFO nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 6.25 seconds to spawn the instance on the hypervisor. 
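The PowerOnVM_Task lines above follow oslo.vmware's task-polling pattern: a vCenter *_Task method returns a Task managed object, and the API session polls it (the "progress is N%" lines) until it reports "completed successfully". A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and a VM managed-object reference supplied by the caller (both are placeholders for illustration, not values taken from this trace):

    def power_on_vm(session, vm_ref):
        # Issue the SOAP call logged as "Invoking VirtualMachine.PowerOnVM_Task";
        # the return value is a Task managed object reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Poll the task (the "PowerOnVM_Task progress is N%" lines above) until
        # it completes successfully or raises if the task ends in an error state.
        session.wait_for_task(task)

The same invoke-then-wait pairing is what produces the SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and CreateVM_Task progress lines elsewhere in this trace.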
[ 1401.421917] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.422294] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bafd88-d5aa-4456-b8c7-8180a2628e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.508967] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.509213] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.509457] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.509635] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.509868] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] No waiting events found dispatching network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1401.510051] env[63371]: WARNING nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received unexpected event network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 for instance with vm_state building and task_state spawning. 
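The instance_info_cache updates in this trace serialize each VIF as a nested dictionary (network, subnets, ips, floating_ips). A small illustrative helper, not Nova code, showing how fixed and floating addresses could be read back out of such a structure; the field names follow the JSON printed by update_instance_cache_with_nw_info above:

    def collect_addresses(network_info):
        # network_info is a list of VIF dicts in the shape logged by
        # update_instance_cache_with_nw_info.
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    fixed.append(ip["address"])
                    for fip in ip.get("floating_ips", []):
                        floating.append(fip["address"])
        return fixed, floating

Applied to the cache entry logged earlier for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de, this would return (['192.168.128.10'], ['10.180.180.146']).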
[ 1401.510215] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-changed-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.510383] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Refreshing instance network info cache due to event network-changed-386f3dc5-c792-4979-a938-7ec61bb88563. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1401.510526] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.558165] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.564044] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.564340] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1401.653416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f16bf3-75bd-48af-b1a6-0317c941cf36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.661290] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdb660a-8cf5-415c-97e0-0ea853eeb764 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.701387] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afd27ce-f7aa-4b86-aa8a-dea97c650818 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.709597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d826a3-c9d2-45b2-89a3-3563bf418e24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.726030] env[63371]: DEBUG nova.compute.provider_tree [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.858395] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.919612] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.945380] env[63371]: INFO nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 48.95 seconds to build instance. [ 1402.113396] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.228973] env[63371]: DEBUG nova.scheduler.client.report [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1402.363258] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] 
[instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1402.363578] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.418669] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727668} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.418669] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1402.418669] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1402.418669] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b52f02a7-7fc1-47dc-b242-b27004a6e155 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.425235] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1402.425235] env[63371]: value = "task-1773749" [ 1402.425235] env[63371]: _type = "Task" [ 1402.425235] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.436449] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773749, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.447380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.093s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.615997] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.616887] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance network_info: |[{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1402.616887] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquired lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.617328] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Refreshing network info cache for port 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.618116] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:b0:15', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '386f3dc5-c792-4979-a938-7ec61bb88563', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1402.629400] env[63371]: DEBUG oslo.service.loopingcall [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.630590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1402.630974] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-167eaf6e-f73b-458a-a8c4-1ca908284347 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.654523] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1402.654523] env[63371]: value = "task-1773750" [ 1402.654523] env[63371]: _type = "Task" [ 1402.654523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.664695] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.733931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.736681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.902s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.736875] env[63371]: DEBUG nova.objects.instance [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lazy-loading 'resources' on Instance uuid 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1402.762961] env[63371]: INFO nova.scheduler.client.report [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleted allocations for instance 7841ebd2-0c23-4e32-8b81-42311a32c6fd [ 1402.867095] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.935931] env[63371]: DEBUG 
oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06576} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.936239] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.937091] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d81178-b34c-4e94-aa36-bccedb99b43c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.957019] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1402.967294] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.968115] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-019d737d-4738-42a5-ab88-12548556e714 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.990863] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1402.990863] env[63371]: value = "task-1773751" [ 1402.990863] env[63371]: _type = "Task" [ 1402.990863] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.002238] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.054049] env[63371]: DEBUG nova.compute.manager [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.054251] env[63371]: DEBUG nova.compute.manager [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing instance network info cache due to event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1403.054484] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.054687] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.054821] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1403.165311] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.274503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.226s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.369183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.369433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.372623] env[63371]: INFO nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Terminating instance [ 1403.374398] env[63371]: DEBUG nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1403.374585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.375545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de97c9dc-881f-4d11-8ed3-5b401457ce12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.388457] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1403.388755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99ad1241-acaa-4b9f-9084-b27fe7e03adc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.405789] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1403.405789] env[63371]: value = "task-1773752" [ 1403.405789] env[63371]: _type = "Task" [ 1403.405789] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.417742] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.442562] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updated VIF entry in instance network info cache for port 386f3dc5-c792-4979-a938-7ec61bb88563. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.442955] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.488970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.511703] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.665103] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task, 'duration_secs': 0.883692} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.665271] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1403.665976] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.666140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.666462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1403.666711] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9adfa1f0-5fac-4324-9f00-4319d3496f4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.675515] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1403.675515] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd" [ 1403.675515] env[63371]: _type = "Task" [ 1403.675515] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.686802] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.859159] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46902a3d-094f-4384-8376-d764f7c27789 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.866623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a733ad-45e8-4ad8-b41a-e550d56cafbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.902265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a034e9f5-eba7-4e34-bed1-be84e2161294 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.912968] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc7a724-ecf0-4bbe-b24f-6366090f9e53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.919631] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773752, 'name': PowerOffVM_Task, 'duration_secs': 0.241018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.920267] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.920443] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.920685] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5a5734e-3efa-4f4d-b762-4f70cbb9ac9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.932524] env[63371]: DEBUG nova.compute.provider_tree [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Releasing lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.951100] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event 
network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.951100] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.951284] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.004489] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task, 'duration_secs': 0.600375} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.004755] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Reconfigured VM instance instance-0000001b to attach disk [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.005471] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8866e1c2-6fe3-4ce3-a2fb-c64c19c878a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.011441] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1404.011441] env[63371]: value = "task-1773754" [ 1404.011441] env[63371]: _type = "Task" [ 1404.011441] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.022364] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773754, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.025124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1404.025319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.025487] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleting the datastore file [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.025732] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d000c4d-125c-4da1-a430-763e77980b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.033588] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1404.033588] env[63371]: value = "task-1773755" [ 1404.033588] env[63371]: _type = "Task" [ 1404.033588] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.041768] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773755, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.051389] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updated VIF entry in instance network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.051780] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.185812] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.186141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.186418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.186697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.186945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.187203] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.187502] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab7ee3d-b91b-4348-ad0f-cb25248cb9dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.195266] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.195493] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.196261] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cb5e2ee-8f88-4829-8930-409439b02c71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.202524] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1404.202524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e" [ 1404.202524] env[63371]: _type = "Task" [ 1404.202524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.210474] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.436408] env[63371]: DEBUG nova.scheduler.client.report [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1404.522830] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773754, 'name': Rename_Task, 'duration_secs': 0.177066} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.523144] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.523704] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b7e4ac8-d738-49cf-9d14-b58c1cf831ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.531068] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1404.531068] env[63371]: value = "task-1773756" [ 1404.531068] env[63371]: _type = "Task" [ 1404.531068] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.541958] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.544980] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231739} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.547270] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.548392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1404.548392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.548392] env[63371]: INFO nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1404.548392] env[63371]: DEBUG oslo.service.loopingcall [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.548392] env[63371]: DEBUG nova.compute.manager [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1404.548392] env[63371]: DEBUG nova.network.neutron [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.554675] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.716202] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.719099] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce27633-457a-4907-bac2-b08970b83015 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.730365] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1404.730365] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994" [ 1404.730365] env[63371]: _type = "Task" [ 1404.730365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.738617] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.789956] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.789956] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.941284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.944375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.343s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.946912] env[63371]: INFO nova.compute.claims [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1404.968819] env[63371]: INFO nova.scheduler.client.report [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted allocations for instance 1924d3d2-cc88-4fd2-b509-8463da796658 [ 1404.992524] env[63371]: DEBUG nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-vif-deleted-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.992721] env[63371]: INFO nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service 
nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Neutron deleted interface fbd3a7d0-068b-4df5-be7f-d8bf5fe260de; detaching it from the instance and deleting it from the info cache [ 1404.992885] env[63371]: DEBUG nova.network.neutron [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.043223] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.241349] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994, 'name': SearchDatastore_Task, 'duration_secs': 0.008677} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.241616] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.241868] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.242143] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e325ce6f-a7f4-4f1f-9a0e-5a45abb0f279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.248923] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1405.248923] env[63371]: value = "task-1773757" [ 1405.248923] env[63371]: _type = "Task" [ 1405.248923] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.256993] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.295780] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.427488] env[63371]: DEBUG nova.network.neutron [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.478544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.441s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.499071] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69261fb4-686b-46f0-b228-c6fe2c7da055 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.508171] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940970cf-1bbe-4860-aa87-841719a7d2fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.546204] env[63371]: DEBUG nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Detach interface failed, port_id=fbd3a7d0-068b-4df5-be7f-d8bf5fe260de, reason: Instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1405.556579] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task, 'duration_secs': 0.856797} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.557143] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.557472] env[63371]: INFO nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 9.21 seconds to spawn the instance on the hypervisor. 
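The power-on sequence above (PowerOnVM_Task issued, progress polled at oslo_vmware/api.py:434, completion reported at api.py:444) follows the standard oslo.vmware task pattern. A minimal sketch of that pattern, assuming illustrative endpoint/credentials and a vm_ref obtained elsewhere; only invoke_api() and wait_for_task() are taken from the records above:

    from oslo_vmware import api

    # Hypothetical endpoint and credentials, for illustration only.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # a vim ManagedObjectReference for the target VM, looked up elsewhere

    # Start the asynchronous vCenter task (PowerOnVM_Task, as in the records above) ...
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then block on it; oslo.vmware polls the task and emits the
    # "progress is N%" and "completed successfully ... duration_secs" DEBUG lines.
    task_info = session.wait_for_task(task_ref)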
[ 1405.557809] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1405.558734] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a5e050-c674-4aad-8506-cbd28a7692a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.758426] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.930622] env[63371]: INFO nova.compute.manager [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 1.38 seconds to deallocate network for instance. [ 1406.083434] env[63371]: INFO nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 51.50 seconds to build instance. [ 1406.259540] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531749} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.259862] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.260113] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.260377] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-196a9780-8e0a-4729-a516-93597f35c6a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.270309] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1406.270309] env[63371]: value = "task-1773758" [ 1406.270309] env[63371]: _type = "Task" [ 1406.270309] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.279905] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773758, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.391715] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.391970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.392202] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.392389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.392549] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.395233] env[63371]: INFO nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Terminating instance [ 1406.398667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.398667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.398667] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1406.438015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.494847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2154c5-c023-4e0c-8323-c097f7d6a722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.504019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb656da-4c17-4b70-8e4f-cc33c20d6b73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.535100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05427ad4-143d-4e55-8121-31c6f28cfde1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.542977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171eeb49-e6fe-4336-aa8b-0c3af0c45571 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.556344] env[63371]: DEBUG nova.compute.provider_tree [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.587986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.781401] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072881} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.781668] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1406.782485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1ef9d9-b7ba-4c07-87a7-cab36dd93b6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.805125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1406.805125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08a4a841-c2f9-46ed-9ea6-f5eb9833b248 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.825094] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1406.825094] env[63371]: value = "task-1773759" [ 1406.825094] env[63371]: _type = "Task" [ 1406.825094] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.836603] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773759, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.918531] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1406.992614] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.062032] env[63371]: DEBUG nova.scheduler.client.report [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1407.096269] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1407.338875] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773759, 'name': ReconfigVM_Task, 'duration_secs': 0.283502} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.339148] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.340700] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94ce64ba-087b-48ee-a6e0-46cc633b340e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.347508] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1407.347508] env[63371]: value = "task-1773760" [ 1407.347508] env[63371]: _type = "Task" [ 1407.347508] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.355919] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.495559] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.496487] env[63371]: DEBUG nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1407.496735] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1407.497640] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66642b77-d584-4586-bab7-bb0040f1d225 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.507682] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1407.508598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ad176cb-8f99-474b-9780-de3f39038ca1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.519137] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1407.519137] env[63371]: value = "task-1773761" [ 1407.519137] env[63371]: _type = "Task" [ 1407.519137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.525295] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.568019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.621s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.568019] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1407.568852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.915s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.570990] env[63371]: INFO nova.compute.claims [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1407.628368] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.861804] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.974328] env[63371]: DEBUG nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.975318] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471c2393-8715-4384-96b8-b57aa9aba811 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.029779] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.079223] env[63371]: DEBUG nova.compute.utils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1408.080345] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1408.080345] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.187320] env[63371]: DEBUG nova.policy [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5bee716ea542f9a463941fa477a897', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d19f4772ff46d3b3024851822cf833', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1408.365886] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task, 'duration_secs': 0.850859} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.366326] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1408.366586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58ae7bfe-9f15-4d5d-a3e1-bcb0b6b9e48d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.376013] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1408.376013] env[63371]: value = "task-1773762" [ 1408.376013] env[63371]: _type = "Task" [ 1408.376013] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.386476] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.493043] env[63371]: INFO nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] instance snapshotting [ 1408.496390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ee59d5-b428-45e9-8c56-8f4a4166be5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.528104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef287ac-5d53-467a-ba3f-703875f1be51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.536385] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task, 'duration_secs': 0.742507} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.539701] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1408.539701] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1408.543500] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-154da4a1-d532-4415-b6c9-8cc7a07a70a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.563718] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Successfully created port: 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1408.572929] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1408.574095] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1408.574095] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1408.574095] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f44a5fc-b635-4a8c-9a63-746ec9449abc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.585026] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1408.585026] env[63371]: value = "task-1773764" [ 1408.585026] env[63371]: _type = "Task" [ 1408.585026] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.585026] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1408.601352] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773764, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.891058] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773762, 'name': PowerOnVM_Task, 'duration_secs': 0.489147} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.893867] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.894190] env[63371]: INFO nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 9.83 seconds to spawn the instance on the hypervisor. 
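The 'Acquiring lock ... / acquired ... waited Ns / "released" ... held Ns' records throughout this run are emitted by the oslo.concurrency lock wrapper (lockutils.py:402/407/421). A minimal sketch of the two usual call patterns, with illustrative function and lock names rather than Nova's own:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named semaphore held; the wrapper logs how long the
        # caller waited for the lock and how long it was held on release.
        return instance_uuid

    # The same facility is also usable as an explicit context manager:
    with lockutils.lock('refresh_cache-' + 'INSTANCE-UUID'):
        pass  # e.g. refresh an instance's network info cache under the lock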
[ 1408.894389] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.895779] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4533151-8005-4a0c-ac54-03ff79c20cd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.046884] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1409.047234] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8ac2f637-1be8-4e96-8f63-d2608428b65a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.059569] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1409.059569] env[63371]: value = "task-1773765" [ 1409.059569] env[63371]: _type = "Task" [ 1409.059569] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.069512] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.105472] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109473} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1409.105696] env[63371]: INFO nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1409.105865] env[63371]: DEBUG oslo.service.loopingcall [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1409.106788] env[63371]: DEBUG nova.compute.manager [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1409.106788] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1409.135104] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1409.256336] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a2c58c-d20e-4970-8f53-b4cb1f228c18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.269266] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7502ca03-b4c5-4c53-ae15-b7cfeaba2719 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.303240] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14b7bc7-ec00-4b25-bc31-88af381e6dff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.311816] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442de87-883f-4f62-84d2-e2c17a740b4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.327042] env[63371]: DEBUG nova.compute.provider_tree [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.418825] env[63371]: INFO nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 47.23 seconds to build instance. [ 1409.571818] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.602620] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1409.633601] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1409.633939] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1409.634170] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.634506] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1409.634642] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.634841] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1409.635477] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1409.635477] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1409.635601] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1409.635824] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1409.636542] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1409.637819] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb0527-96e0-4fe0-b264-3f0ad77c9003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.642307] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.650727] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a8288c-c781-4b68-b397-65ba61f52971 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.827913] env[63371]: DEBUG nova.scheduler.client.report [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1409.877071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.877310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.922556] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.697s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.075651] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.145067] env[63371]: INFO nova.compute.manager [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 1.04 seconds to deallocate network for instance. [ 1410.146249] env[63371]: DEBUG nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1410.146464] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.146669] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.146829] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.146991] env[63371]: DEBUG nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] No waiting events found dispatching network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1410.147395] env[63371]: WARNING nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received unexpected event network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 for instance with vm_state building and task_state spawning. 
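The lock entries above ("acquired ... :: waited 0.000s", "released ... :: held 0.000s") come from named locks that serialize work per instance UUID, per event queue, and for the compute_resources tracker. A minimal sketch of that pattern using oslo.concurrency's synchronized decorator; the lock name, prefix, and function body are illustrative only and are not Nova's actual call sites.

    from oslo_concurrency import lockutils

    # One thread at a time may run this critical section for the given lock
    # name; lockutils logs how long the caller waited for the lock and how
    # long it was held, which is what the "waited"/"held" lines show.
    @lockutils.synchronized('852e14a7-2f9f-421c-9804-56c885885c7d', 'nova-')
    def do_terminate_instance():
        pass  # stands in for the real teardown work

    do_terminate_instance()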
[ 1410.238422] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Successfully updated port: 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.247426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.247700] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.247931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.248246] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.248371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.251966] env[63371]: INFO nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Terminating instance [ 1410.254114] env[63371]: DEBUG nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1410.254377] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1410.256115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4fe6a2-1a6c-4c2b-a9c0-163387e80be1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.266123] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1410.266384] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db1dff37-30f8-4706-9e1a-977572de3369 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.274446] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1410.274446] env[63371]: value = "task-1773766" [ 1410.274446] env[63371]: _type = "Task" [ 1410.274446] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.283342] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.338019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.338019] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1410.339287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.960s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.341315] env[63371]: INFO nova.compute.claims [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.423623] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1410.575124] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task, 'duration_secs': 1.349315} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.575458] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1410.576328] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c803ff7-fe18-4daa-aeb7-7613bb7936a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.655061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.744460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.744460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.744460] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 
tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.784184] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773766, 'name': PowerOffVM_Task, 'duration_secs': 0.201842} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.784657] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1410.784657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1410.784907] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-957d5213-a9d0-4027-ad19-87ed8b7e5171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.848029] env[63371]: DEBUG nova.compute.utils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1410.850908] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1410.851112] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.854859] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1410.855073] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1410.855251] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1410.855827] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26c824a1-44ed-4f66-ae1e-d937a1f86bc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.863276] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1410.863276] env[63371]: value = "task-1773768" [ 1410.863276] env[63371]: _type = "Task" [ 1410.863276] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.873682] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773768, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.944868] env[63371]: DEBUG nova.policy [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1410.949382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.096481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1411.096906] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fb19541b-fc27-4b4f-bb91-7b7f3d021662 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.106477] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1411.106477] env[63371]: value = "task-1773769" [ 1411.106477] env[63371]: _type = "Task" [ 1411.106477] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.114910] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.289294] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.316311] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully created port: d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.358126] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1411.374821] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149927} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.375063] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.375262] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1411.375455] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1411.375589] env[63371]: INFO nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1411.375857] env[63371]: DEBUG oslo.service.loopingcall [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1411.375993] env[63371]: DEBUG nova.compute.manager [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1411.376092] env[63371]: DEBUG nova.network.neutron [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1411.626558] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.643905] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.982039] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bba1b2b-bd2b-4b88-a02d-4ed011bcc07e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.988538] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbf2eb2-9b60-4ddf-9c65-1c9c2e5f1f7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.021299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4070a6-5e21-4551-8b57-8ab96871e08c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.028935] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50268838-edd9-4889-9d0e-3292f90f2f63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1412.042425] env[63371]: DEBUG nova.compute.provider_tree [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.117292] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.152895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.152895] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance network_info: |[{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1412.153220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:7a:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b8eba67-08ba-47de-bad7-2e38e4a7ea31', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.158689] env[63371]: DEBUG oslo.service.loopingcall [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.158907] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.159200] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de02b9b1-8f9f-4d01-ae65-64a47abed162 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.176559] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-changed-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1412.176753] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Refreshing instance network info cache due to event network-changed-1b8eba67-08ba-47de-bad7-2e38e4a7ea31. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1412.176967] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Acquiring lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.177127] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Acquired lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.177285] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Refreshing network info cache for port 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1412.179942] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.179942] env[63371]: value = "task-1773770" [ 1412.179942] env[63371]: _type = "Task" [ 1412.179942] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.188685] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773770, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.372569] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1412.399391] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.399646] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.399823] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.400095] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.400298] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.400450] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.400659] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.400858] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.401066] env[63371]: DEBUG 
nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.401236] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.401410] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.402291] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e7ebb5-64ed-4077-86c4-80b7e99ddd16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.410431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1513633c-8dad-49fb-9f8a-9a6599912949 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.414209] env[63371]: DEBUG nova.network.neutron [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.548843] env[63371]: DEBUG nova.scheduler.client.report [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1412.618220] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.691941] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773770, 'name': CreateVM_Task, 'duration_secs': 0.508306} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.692274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.693031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.693031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.693353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.693627] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-112de93d-5fca-433e-80a1-8a2fd49096da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.698423] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1412.698423] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c" [ 1412.698423] env[63371]: _type = "Task" [ 1412.698423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.706993] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.917663] env[63371]: INFO nova.compute.manager [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 1.54 seconds to deallocate network for instance. [ 1412.999296] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updated VIF entry in instance network info cache for port 1b8eba67-08ba-47de-bad7-2e38e4a7ea31. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.999641] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.053790] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.054341] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1413.057406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 29.923s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.118147] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task, 'duration_secs': 1.635374} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.118426] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created linked-clone VM from snapshot [ 1413.119192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4f790a-e312-4e23-9bee-130a5b4d3463 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.126483] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploading image 67c79a69-90fa-469e-b65b-470387ba8d71 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1413.154603] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1413.154603] env[63371]: value = "vm-368279" [ 1413.154603] env[63371]: _type = "VirtualMachine" [ 1413.154603] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1413.154873] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-19023291-0f48-4224-a6b7-76af45716180 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.162299] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1413.162299] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.162299] env[63371]: _type = "HttpNfcLease" [ 1413.162299] env[63371]: } obtained for exporting VM: (result){ [ 1413.162299] env[63371]: value = "vm-368279" [ 1413.162299] env[63371]: _type = "VirtualMachine" [ 1413.162299] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1413.162534] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1413.162534] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.162534] env[63371]: _type = "HttpNfcLease" [ 1413.162534] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1413.168877] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1413.168877] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.168877] env[63371]: _type = "HttpNfcLease" [ 1413.168877] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1413.208851] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.009679} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.209166] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.209397] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.209628] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.209782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.210015] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.210293] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91616435-cdcf-4bd6-8156-f39f0cb9ef71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.230145] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.230340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.231108] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b82e7c3-ca71-4b6f-857f-347421b14d39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.236543] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1413.236543] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1" [ 1413.236543] env[63371]: _type = "Task" [ 1413.236543] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.244550] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.424457] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.492417] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully updated port: d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.502645] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Releasing lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.502896] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-vif-deleted-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.503147] env[63371]: INFO nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Neutron deleted interface 386f3dc5-c792-4979-a938-7ec61bb88563; detaching it from the instance and deleting it from the info cache [ 1413.503332] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1413.562554] env[63371]: DEBUG nova.compute.utils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.569185] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1413.569502] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1413.632599] env[63371]: DEBUG nova.policy [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1413.675157] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1413.675157] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.675157] env[63371]: _type = "HttpNfcLease" [ 1413.675157] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1413.677286] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1413.677286] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.677286] env[63371]: _type = "HttpNfcLease" [ 1413.677286] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1413.678294] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6b154e-07e1-4789-a169-19b0bf6537ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.689053] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1413.689252] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1413.762841] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1, 'name': SearchDatastore_Task, 'duration_secs': 0.00877} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.762841] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c821bec-3d94-428d-b53e-f8cfc4e7252d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.767811] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1413.767811] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd" [ 1413.767811] env[63371]: _type = "Task" [ 1413.767811] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.779113] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.787755] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6705bacc-4f31-4f90-99ae-12c059800ed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.962584] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Successfully created port: b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.994124] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.994267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.994409] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.014187] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cfd5152-536b-4235-9623-35df7e24b4d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.027052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3c65fc-e8d5-4912-8a44-bf9a88f0c751 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.071630] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1414.074503] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Detach interface failed, port_id=386f3dc5-c792-4979-a938-7ec61bb88563, reason: Instance 852e14a7-2f9f-421c-9804-56c885885c7d could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1414.195280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7d2488-6c1b-4b4b-b3fe-5dd0c6b1557e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.203826] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da547c8a-0e0d-40e4-94a7-daccec1edfe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.246483] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93db7e32-165c-4738-8ab3-81b06fc6987e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.255559] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fb71b0-1cd7-4c63-8e56-abafca856a13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.271097] env[63371]: DEBUG nova.compute.provider_tree [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.285119] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.285387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.285741] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.288166] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-771dc7e6-4e9a-401b-a406-d9855bd23536 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.294673] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1414.294673] env[63371]: value = "task-1773772" [ 1414.294673] env[63371]: _type = "Task" [ 1414.294673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.304515] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.312256] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.313047] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.313126] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.313300] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.313530] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] No waiting events found dispatching network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1414.313693] env[63371]: WARNING nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received unexpected event network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 for instance with vm_state building and task_state spawning. [ 1414.313804] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.314119] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1414.314292] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.528194] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.737924] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.780041] env[63371]: DEBUG nova.scheduler.client.report [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1414.809369] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.085078] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1415.111361] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1415.111710] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1415.111963] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1415.112189] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1415.112399] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1415.112558] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1415.112841] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1415.113080] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1415.113347] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1415.113513] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1415.113682] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1415.114677] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae070f0-31df-40d5-8d0a-4d07d4a4e763 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.122907] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269fe18f-2c69-4ac4-a2c3-b7baf0c6883a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.244061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.244497] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance network_info: |[{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1415.244861] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.244982] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.246419] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:79:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd92b8632-8794-486c-a8eb-5c8844009035', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.254138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating folder: Project (5a5897667b6b47deb7ff5b64f9499f36). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.255443] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e67433d7-f17b-4f88-baea-6b5b8b1d19ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.266814] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created folder: Project (5a5897667b6b47deb7ff5b64f9499f36) in parent group-v368199. [ 1415.266995] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating folder: Instances. Parent ref: group-v368281. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.267239] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06e32a3f-aa9f-4428-8a4f-053e74936309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.277055] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created folder: Instances in parent group-v368281. 
[ 1415.277298] env[63371]: DEBUG oslo.service.loopingcall [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.277521] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.277766] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2d8beec-2da0-4e71-bb78-0486ad9886b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.302784] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.302784] env[63371]: value = "task-1773775" [ 1415.302784] env[63371]: _type = "Task" [ 1415.302784] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.310133] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766508} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.310889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1415.311149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1415.312778] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44a1c836-a74c-4fd2-9faa-cf7cbb591ae5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.318238] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.323099] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1415.323099] env[63371]: value = "task-1773776" [ 1415.323099] env[63371]: _type = "Task" [ 1415.323099] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.331989] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773776, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.656526] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Successfully updated port: b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1415.798512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.741s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.802226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.473s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.807701] env[63371]: INFO nova.compute.claims [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.831549] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.837596] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077667} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.837596] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.838148] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026c0cfa-a386-4d4d-be8b-6923d7f80309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.867409] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.869237] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d79aa80e-1466-4d1d-a9b8-07facc18784d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.899123] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1415.899123] env[63371]: value = "task-1773777" [ 1415.899123] env[63371]: _type = "Task" [ 1415.899123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.907275] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.052102] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.052464] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.160054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.160054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.160241] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.334351] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.382270] env[63371]: INFO nova.scheduler.client.report [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleted allocation for migration e496466e-2a3b-442c-9adb-941ce7e06a5e [ 1416.408516] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.416762] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.416976] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.417524] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.417524] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.417524] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] No waiting events found dispatching network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1416.417716] env[63371]: WARNING nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received unexpected event network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 for instance with vm_state building and task_state spawning. [ 1416.417936] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.418028] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing instance network info cache due to event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1416.418163] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.555718] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.706319] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.837388] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.890133] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 36.765s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.909185] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.934795] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.333086] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task, 'duration_secs': 1.6653} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.333276] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1417.333964] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.337782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.337782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.337782] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d476bbe8-8cd6-430a-b8f9-d49a31e8dd10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.340845] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.340845] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba" [ 1417.340845] env[63371]: _type = "Task" [ 1417.340845] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.350938] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.410967] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task, 'duration_secs': 1.425376} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.410967] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1417.411839] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d86df70-c146-473f-8a1a-a1909f977e4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.419113] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1417.419113] env[63371]: value = "task-1773778" [ 1417.419113] env[63371]: _type = "Task" [ 1417.419113] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.426948] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773778, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.439902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.439902] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance network_info: |[{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1417.441293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef0855c-eada-40bb-8a0a-4d85f8d9a12a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.443583] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.443786] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.445065] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:8f:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b03ddfde-3b36-43a8-8c6a-00cd704bce22', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.453420] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating folder: Project (35882164a8734563a006675f2ec6ba71). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.454698] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef551d84-f33c-4bd2-a0f4-8640460f1b55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.461850] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84017d0-42d9-4abd-9a04-a797e11ff587 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.466573] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created folder: Project (35882164a8734563a006675f2ec6ba71) in parent group-v368199. [ 1417.466766] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating folder: Instances. Parent ref: group-v368284. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.467012] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9291d36-c3ce-4bd3-aafb-a41af8a1cb39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.493851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4a9aa6-c4d9-494c-b63f-b8f1f1273b41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.502015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde8e194-3003-4eab-9648-8a2849a4dd6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.507242] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created folder: Instances in parent group-v368284. [ 1417.507504] env[63371]: DEBUG oslo.service.loopingcall [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.508168] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.509030] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-104a0c3b-84ec-46ce-80a1-ac5da9d3ade1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.531320] env[63371]: DEBUG nova.compute.provider_tree [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.539369] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.539369] env[63371]: value = "task-1773781" [ 1417.539369] env[63371]: _type = "Task" [ 1417.539369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.549312] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773781, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.862256] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012532} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.862635] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.862887] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.863111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.863258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.863433] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.863698] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0784ba7a-7bf8-4f16-9fee-1f7ef3945c59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.879743] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.880131] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.884864] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29a06dd7-571a-48c1-afa1-998339e6b836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.895656] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.895656] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca2347-90a5-4ef6-e2c9-a4168249c7aa" [ 1417.895656] env[63371]: _type = "Task" [ 1417.895656] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.917800] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca2347-90a5-4ef6-e2c9-a4168249c7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.012017} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.918450] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d36d923a-e4d7-4c51-8720-14ac14ec14cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.941430] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.941430] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5244365c-7a31-d35c-8ba6-da9cc91110ba" [ 1417.941430] env[63371]: _type = "Task" [ 1417.941430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.953822] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773778, 'name': Rename_Task, 'duration_secs': 0.182312} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.956548] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1417.957271] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9def46f0-902e-4a86-8bfc-f2755385750e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.963781] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5244365c-7a31-d35c-8ba6-da9cc91110ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012308} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.964435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.964695] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1417.964956] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-097c7ad8-b95a-45ee-b21c-9b9365cb829e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.969377] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1417.969377] env[63371]: value = "task-1773782" [ 1417.969377] env[63371]: _type = "Task" [ 1417.969377] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.973922] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.973922] env[63371]: value = "task-1773783" [ 1417.973922] env[63371]: _type = "Task" [ 1417.973922] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.980182] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.985528] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.034219] env[63371]: DEBUG nova.scheduler.client.report [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.053545] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773781, 'name': CreateVM_Task, 'duration_secs': 0.479732} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.053748] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1418.054485] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.054646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.054959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1418.055253] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bc5981c-d16f-4aa1-b5ef-62ebd7ee6920 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.059973] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1418.059973] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1" [ 1418.059973] env[63371]: _type = "Task" [ 1418.059973] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.068645] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.235116] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updated VIF entry in instance network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.235589] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.443034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.443437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 
tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.482155] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.487226] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.539340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.540658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.924s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.542736] env[63371]: INFO nova.compute.claims [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.571863] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1, 'name': SearchDatastore_Task, 'duration_secs': 0.016419} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.572270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.572482] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1418.572730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.572870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.573073] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1418.573712] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ea5130-c52d-45aa-b0cc-fcb571f9e870 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.587101] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1418.587336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1418.588139] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75cec582-006d-436d-bfbf-648432eebe53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.594243] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1418.594243] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187" [ 1418.594243] env[63371]: _type = "Task" [ 1418.594243] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.605341] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.738763] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.983463] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task, 'duration_secs': 0.860292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.985543] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1418.985791] env[63371]: INFO nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 1418.986015] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1418.989581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae445254-d264-4017-b6e8-e5a4aca8907a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.992057] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661202} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.992305] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.992509] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.993104] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9beec18-74bb-47cb-8b89-fb2c19ce077b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.003828] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1419.003828] env[63371]: value = "task-1773784" [ 1419.003828] env[63371]: _type = "Task" [ 1419.003828] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.013035] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773784, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.049281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.049562] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.104992] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187, 'name': SearchDatastore_Task, 'duration_secs': 0.02289} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.105677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa33deac-4aef-42d8-b609-1da94bc68280 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.110730] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1419.110730] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8" [ 1419.110730] env[63371]: _type = "Task" [ 1419.110730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.118117] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.511880] env[63371]: INFO nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 50.93 seconds to build instance. [ 1419.518916] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773784, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065839} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.519187] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.519989] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e231729f-e978-4e29-ad32-02bc592977be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.543240] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.543731] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a39c89f0-b284-46e3-b17c-e1b61f16cdd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.560879] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.511s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.561335] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1419.570538] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1419.570538] env[63371]: value = "task-1773785" [ 1419.570538] env[63371]: _type = "Task" [ 1419.570538] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.624730] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8, 'name': SearchDatastore_Task, 'duration_secs': 0.040512} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.624995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.625265] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1419.625527] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-480825fc-23d5-48c0-b14f-a63a712c56f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.633646] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1419.633646] env[63371]: value = "task-1773786" [ 1419.633646] env[63371]: _type = "Task" [ 1419.633646] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.644843] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.890884] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafcceba-af1a-4b41-b401-ad41bf096811 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.898024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1419.898024] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3cf1c19f-12e4-4f4f-af44-b444cc6ecbaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.906747] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Waiting for the task: (returnval){ [ 1419.906747] env[63371]: value = "task-1773787" [ 1419.906747] env[63371]: _type = "Task" [ 1419.906747] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.917718] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.016203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.073s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.068316] env[63371]: DEBUG nova.compute.utils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1420.070379] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1420.070557] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1420.083628] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.086675] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6bedae-2cb6-44e2-b89b-cf3d13213883 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.095114] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d4ed7-ddf5-4158-80df-0c7c8b473f8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.126631] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9109301c-1bd6-48e8-8815-7386dd3e973e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.130165] env[63371]: DEBUG nova.policy [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1819eca41829451e9a866d7e34cbe801', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bda622ab1474b76a46a5ba68977188b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1420.140758] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b7f4f5-2662-465c-857d-7d3fa4bcc90a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.151668] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.159732] env[63371]: DEBUG nova.compute.provider_tree [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.424882] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.488458] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Successfully created port: 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1420.520024] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1420.575170] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1420.590275] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.644378] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.663158] env[63371]: DEBUG nova.scheduler.client.report [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1420.918450] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.043453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.091142] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task, 'duration_secs': 1.342121} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.091871] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1421.092288] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e964421-06b9-4236-9139-f85d0184d092 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.098905] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1421.098905] env[63371]: value = "task-1773788" [ 1421.098905] env[63371]: _type = "Task" [ 1421.098905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.107905] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773788, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.144036] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.020922} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.144298] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.144507] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.144822] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d3fc1bf-12c5-42d1-9bba-5fdd13d83b87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.153364] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1421.153364] env[63371]: value = "task-1773789" [ 1421.153364] env[63371]: _type = "Task" [ 1421.153364] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.163015] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773789, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.171047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.171585] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1421.174708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.751s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.419920] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task, 'duration_secs': 1.028245} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.420233] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1421.420416] env[63371]: DEBUG nova.compute.manager [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1421.421238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa0211d-53ad-4a28-b3bc-56463e819614 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.594063] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1421.609522] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773788, 'name': Rename_Task, 'duration_secs': 0.19632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.609825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.610118] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ba28dfd-1a52-477c-969b-b683b29feeef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.618331] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1421.618331] env[63371]: value = "task-1773790" [ 1421.618331] env[63371]: _type = "Task" [ 1421.618331] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.620514] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1421.620755] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1421.620913] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.621135] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1421.621284] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.621443] env[63371]: DEBUG nova.virt.hardware [None 
req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1421.621652] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1421.621826] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1421.622020] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1421.622200] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1421.622381] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1421.623261] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b05fe69-d8f6-4a72-a7e8-e314c313e829 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.635705] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.636852] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498f4b80-1272-4961-b9d6-a4887847f1e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.661689] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773789, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062443} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.661962] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1421.662770] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24ebfbe-b6d0-4807-ac80-7e8e60968f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.693560] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.694967] env[63371]: DEBUG nova.compute.utils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1421.696842] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66ce78c8-4cb1-4f28-b066-1c61afae68a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.713545] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1421.716352] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1421.716763] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1421.725600] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1421.725600] env[63371]: value = "task-1773791" [ 1421.725600] env[63371]: _type = "Task" [ 1421.725600] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.732740] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773791, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.776315] env[63371]: DEBUG nova.policy [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3dec49b67cd49159192b5c2756fc2e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f2fde472b14ab9a4d20947ca714191', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1422.133298] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.178938] env[63371]: DEBUG nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.179167] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.179378] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.179541] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.180119] env[63371]: DEBUG nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] No waiting events found dispatching network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1422.180119] env[63371]: 
WARNING nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received unexpected event network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 for instance with vm_state building and task_state spawning. [ 1422.233327] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773791, 'name': ReconfigVM_Task, 'duration_secs': 0.290259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.233611] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.234295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-094e2bea-905f-482d-9f89-5bf054c7ecbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.244454] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1422.244454] env[63371]: value = "task-1773792" [ 1422.244454] env[63371]: _type = "Task" [ 1422.244454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 47c1c242-d190-4523-8033-307c5a9b7535 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 201a2d1e-9e2c-4c07-92be-200408874ad4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cbcdfe1a-86a4-4a12-99b5-44d291d41769 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cfbd0c7c-243e-497a-acb1-ab9323c23574 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e0369f27-68ea-49c4-8524-3dbbb3cde96e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e912c210-3ae1-47ce-b9cd-afebf6195606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245250] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance af1281ba-c3be-43b4-a039-86d94bd9efe4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1422.245250] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 76c861a7-30f2-40f4-b723-7912975f36f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e00c2e45-b8bc-440b-8b58-a21f127192c7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 852e14a7-2f9f-421c-9804-56c885885c7d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9249f27a-1985-4be1-947c-e433c7aa26f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b48a8e83-e581-4886-833b-bbce155d40d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.256867] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773792, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.366202] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Successfully created port: 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1422.395182] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Successfully updated port: 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1422.633738] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task, 'duration_secs': 0.613048} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.634020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.634229] env[63371]: INFO nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 10.26 seconds to spawn the instance on the hypervisor. 
[ 1422.634507] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.635241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8eaa38-087c-4fb5-99b8-5f0262bcd30e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.725360] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1422.754175] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.765035] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773792, 'name': Rename_Task, 'duration_secs': 0.248461} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.765035] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1422.765035] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da99f82f-89d6-4f5a-b3a6-1e54a02715d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.769292] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1422.769681] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1422.770076] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.771082] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1422.771539] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.771850] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1422.772316] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1422.773585] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1422.775352] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e71e7a-2559-4cc3-b7e3-200f8633f5fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.787674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b2b16-2fdf-4850-a08a-64e3666a6ee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.792623] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1422.792623] env[63371]: value = "task-1773793" [ 1422.792623] env[63371]: _type = "Task" [ 1422.792623] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.809816] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.900648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.900648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.900648] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.157585] env[63371]: INFO nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 45.52 seconds to build instance. [ 1423.257395] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b5e259ea-d103-41c6-84b3-748813bb514d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.310913] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.433134] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1423.659966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.573s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.706347] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.760994] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.795515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.797136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.803741] env[63371]: INFO nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Terminating instance [ 1423.806260] env[63371]: DEBUG nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1423.806469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1423.807272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e22e5-2e28-4051-9fc1-d370bc9dceab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.813556] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task, 'duration_secs': 0.842866} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.814205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1423.814353] env[63371]: INFO nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 1423.814531] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1423.815307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fb0a01-4b88-45e6-b83d-1fd1a4ab93aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.821349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1423.821819] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-753c5e93-f241-4f53-93dc-f70678e69197 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.999506] env[63371]: DEBUG nova.compute.manager [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.999704] env[63371]: DEBUG nova.compute.manager [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-d92b8632-8794-486c-a8eb-5c8844009035. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1423.999933] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.001053] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.001053] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.167018] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1424.209209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.209547] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance network_info: |[{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1424.210191] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:77:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '292614c6-49c4-4096-afda-debce88edee1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1424.218861] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating folder: Project (1bda622ab1474b76a46a5ba68977188b). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1424.219507] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7f5839f-ca3a-4f19-9a9b-ee4a55528a0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.231383] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created folder: Project (1bda622ab1474b76a46a5ba68977188b) in parent group-v368199. [ 1424.231579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating folder: Instances. Parent ref: group-v368287. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1424.231899] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7556689b-ae67-41ff-bee9-6cd00e6fa646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.246459] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created folder: Instances in parent group-v368287. [ 1424.246703] env[63371]: DEBUG oslo.service.loopingcall [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.246890] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1424.247122] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85bc444f-12f0-4cf4-b52c-0dc6fe3b1326 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.266103] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 36b81143-211f-4c77-854b-abe0d3f39ce4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.273172] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-changed-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1424.273382] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Refreshing instance network info cache due to event network-changed-292614c6-49c4-4096-afda-debce88edee1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1424.273602] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquiring lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.273744] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquired lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.273902] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Refreshing network info cache for port 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.283041] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1424.283041] env[63371]: value = "task-1773797" [ 1424.283041] env[63371]: _type = "Task" [ 1424.283041] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.288341] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Successfully updated port: 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1424.292962] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773797, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.338304] env[63371]: INFO nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 41.98 seconds to build instance. 
[ 1424.467550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.467824] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.467995] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleting the datastore file [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.468296] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40462810-838d-4119-adbf-7fdd774de0dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.475310] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1424.475310] env[63371]: value = "task-1773798" [ 1424.475310] env[63371]: _type = "Task" [ 1424.475310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.485078] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773798, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.660092] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1424.661094] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e0f192-9ea7-4bdb-8684-9caa253e6f19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.668028] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk is in state: ready. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1424.668211] env[63371]: ERROR oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk due to incomplete transfer. [ 1424.668466] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8003cc65-8bcd-4c5e-b7b1-74c9aad55df8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.684529] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1424.684850] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploaded image 67c79a69-90fa-469e-b65b-470387ba8d71 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1424.687350] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1424.687600] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-02d8f18f-80d2-449b-9816-9f469e7a7ac6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.694586] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1424.694586] env[63371]: value = "task-1773799" [ 1424.694586] env[63371]: _type = "Task" [ 1424.694586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.698814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.706081] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.770569] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 855005ae-3b0e-4ad7-80cf-266075fc6d0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.792313] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773797, 'name': CreateVM_Task, 'duration_secs': 0.468492} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.793552] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1424.793856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.794226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.794473] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.796109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.796532] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.796844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1424.801023] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-6656c820-0041-40b2-8e74-a8acebd2a4b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.803520] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1424.803520] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3" [ 1424.803520] env[63371]: _type = "Task" [ 1424.803520] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.813366] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.840257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.073s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.854419] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1424.855597] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.990447] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.990837] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.991124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1424.991340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1424.991529] env[63371]: INFO nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1424.991794] env[63371]: DEBUG oslo.service.loopingcall [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.992024] env[63371]: DEBUG nova.compute.manager [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1424.992123] env[63371]: DEBUG nova.network.neutron [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.130756] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updated VIF entry in instance network info cache for port 292614c6-49c4-4096-afda-debce88edee1. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.131209] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.205144] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.277354] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 50d5eac1-0752-4089-948c-b04439df6f6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.314529] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3, 'name': SearchDatastore_Task, 'duration_secs': 0.075508} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.314802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.315060] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1425.315744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.315744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.315744] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.316407] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbaa012b-239d-4f9d-958a-0770e418aed3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.332648] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.332830] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1425.333645] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b7d40b0-9df3-4aa9-98c4-d5869697d31c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.339199] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1425.339199] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768" [ 1425.339199] env[63371]: _type = "Task" [ 1425.339199] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.349024] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1425.349934] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.358764] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.362604] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1425.512115] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.634095] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Releasing lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.634459] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.634659] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.634867] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.635039] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.635289] 
env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] No waiting events found dispatching network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1425.635378] env[63371]: WARNING nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received unexpected event network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb for instance with vm_state building and task_state spawning. [ 1425.704660] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task, 'duration_secs': 0.884637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.704975] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroyed the VM [ 1425.705299] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1425.705558] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-364ba79c-f718-4b7f-82c3-e1522782dc43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.711967] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1425.711967] env[63371]: value = "task-1773800" [ 1425.711967] env[63371]: _type = "Task" [ 1425.711967] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.721527] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.780952] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance be37eb1c-8582-4446-afd6-ae11a8cadf95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.854405] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768, 'name': SearchDatastore_Task, 'duration_secs': 0.030781} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.857535] env[63371]: DEBUG nova.network.neutron [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.858963] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a424979-97cc-4ca3-b148-cea690337dd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.865855] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1425.865855] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005" [ 1425.865855] env[63371]: _type = "Task" [ 1425.865855] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.873700] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.877560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.014577] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.014957] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance network_info: |[{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1426.016029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:5e:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa089f3-fe69-452e-b5e4-4daac745b9bb', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1426.023598] env[63371]: DEBUG oslo.service.loopingcall [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.023817] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1426.024967] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-935e0699-eeb1-41ac-892a-71a08d56b43f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.043018] env[63371]: DEBUG nova.compute.manager [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1426.043177] env[63371]: DEBUG nova.compute.manager [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing instance network info cache due to event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1426.043382] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.043521] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.043676] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.047250] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1426.047250] env[63371]: value = "task-1773801" [ 1426.047250] env[63371]: _type = "Task" [ 1426.047250] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.055186] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773801, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.225984] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.283978] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 64fc862c-a755-4cac-997b-7a8328638269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1426.287713] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-changed-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1426.287950] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Refreshing instance network info cache due to event network-changed-9aa089f3-fe69-452e-b5e4-4daac745b9bb. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1426.288180] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.288367] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.288491] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Refreshing network info cache for port 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.290131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.290212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.290699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock 
"ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.290699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.290794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.294844] env[63371]: INFO nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Terminating instance [ 1426.296882] env[63371]: DEBUG nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1426.297102] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1426.298819] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d15d7d-9b2b-468b-af55-53fa962263b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.307752] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1426.308010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8324933-991e-4e8f-9c63-6398167d80f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.315451] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1426.315451] env[63371]: value = "task-1773802" [ 1426.315451] env[63371]: _type = "Task" [ 1426.315451] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.324316] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.363422] env[63371]: INFO nova.compute.manager [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 1.37 seconds to deallocate network for instance. [ 1426.381650] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005, 'name': SearchDatastore_Task, 'duration_secs': 0.043757} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.381785] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.382247] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1426.383048] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-252a58f8-6bba-4527-92f4-f277629c3929 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.390076] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1426.390076] env[63371]: value = "task-1773803" [ 1426.390076] env[63371]: _type = "Task" [ 1426.390076] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.403808] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.559094] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773801, 'name': CreateVM_Task, 'duration_secs': 0.504633} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.559286] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1426.560040] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.560224] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.560560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1426.560823] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dbf780-3fad-4c75-aa73-e5d9f87d7e31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.567027] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1426.567027] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663" [ 1426.567027] env[63371]: _type = "Task" [ 1426.567027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.573674] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.731457] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.790988] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dcf8063b-56eb-439c-bee5-139a1e157714 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1426.831569] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773802, 'name': PowerOffVM_Task, 'duration_secs': 0.216507} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.831875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1426.832062] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1426.832346] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-792d0aff-d04e-4543-8846-b3c0aa20fb2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.878163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.902177] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.903398] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updated VIF entry in instance network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1426.903880] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.907511] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1426.907945] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1426.908261] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleting the datastore file [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.910798] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d27ee27-de74-4b52-a0ee-7b54578f78ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.917153] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1426.917153] env[63371]: value = "task-1773805" [ 1426.917153] env[63371]: _type = "Task" [ 1426.917153] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.925904] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.079017] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.079017] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updated VIF entry in instance network info cache for port 9aa089f3-fe69-452e-b5e4-4daac745b9bb. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.079194] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.079653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.079850] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1427.080153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.080337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.080554] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1427.080819] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd96bf83-f510-4606-95a3-31f8430f1aae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.089746] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1427.089915] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1427.091264] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65a71bb1-fefb-4f2a-8a70-43e9728cd37a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.101134] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1427.101134] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed" [ 1427.101134] env[63371]: _type = "Task" [ 1427.101134] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.110348] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.224023] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task, 'duration_secs': 1.195948} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.224023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1427.224152] env[63371]: INFO nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 18.73 seconds to snapshot the instance on the hypervisor. [ 1427.301633] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1427.402028] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522892} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.402330] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1427.402575] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1427.402824] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f82928b-8aa3-4ca9-80dd-f7b856bb8419 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.406312] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.409036] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1427.409036] env[63371]: value = "task-1773806" [ 1427.409036] env[63371]: _type = "Task" [ 1427.409036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.416462] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773806, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.426472] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.584360] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.584630] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-vif-deleted-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.612094] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed, 'name': SearchDatastore_Task, 'duration_secs': 0.011624} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.612991] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0afef573-8665-491d-be1d-81db70ffb883 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.618346] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1427.618346] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499" [ 1427.618346] env[63371]: _type = "Task" [ 1427.618346] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.626285] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.804269] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1427.918741] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071588} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.921890] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1427.922647] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46c6a5d-6ed7-493f-bd7c-c0977011d2ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.930179] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.826538} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.939330] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.939560] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1427.939791] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1427.940013] env[63371]: INFO nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1427.940349] env[63371]: DEBUG oslo.service.loopingcall [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.949220] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.949509] env[63371]: DEBUG nova.compute.manager [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1427.949605] env[63371]: DEBUG nova.network.neutron [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1427.951648] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e14d557e-b3d8-4b23-912f-86784c37632f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.974273] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1427.974273] env[63371]: value = "task-1773807" [ 1427.974273] env[63371]: _type = "Task" [ 1427.974273] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.982575] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773807, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.133115] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499, 'name': SearchDatastore_Task, 'duration_secs': 0.086047} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.133392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.133644] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1428.133903] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd357576-e60c-4e66-9622-939c7b851cc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.143468] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1428.143468] env[63371]: value = "task-1773808" [ 1428.143468] env[63371]: _type = "Task" [ 1428.143468] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.153061] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.308022] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1428.486850] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773807, 'name': ReconfigVM_Task, 'duration_secs': 0.372419} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.487169] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.488146] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90f001b1-72ec-41d9-b2ca-e666af3b39ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.496846] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1428.496846] env[63371]: value = "task-1773809" [ 1428.496846] env[63371]: _type = "Task" [ 1428.496846] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.507979] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773809, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.655839] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.811586] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 594ff846-8e3e-4882-8ddc-41f824a77a5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1428.930187] env[63371]: DEBUG nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-vif-deleted-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.930187] env[63371]: INFO nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Neutron deleted interface 9a4b63df-9697-47a1-81ad-c69476a80975; detaching it from the instance and deleting it from the info cache [ 1428.930187] env[63371]: DEBUG nova.network.neutron [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.006868] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773809, 'name': Rename_Task, 'duration_secs': 0.25852} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.007101] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.007348] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17b39a8a-01c7-4a04-982d-1b1b4049b783 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.013659] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1429.013659] env[63371]: value = "task-1773810" [ 1429.013659] env[63371]: _type = "Task" [ 1429.013659] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.023113] env[63371]: DEBUG nova.network.neutron [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.027813] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773810, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.155944] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58773} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.156222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1429.156432] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1429.156679] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e25c3f0f-6849-46bb-b5dc-b9927224733d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.164585] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1429.164585] env[63371]: value = "task-1773811" [ 1429.164585] env[63371]: _type = "Task" [ 1429.164585] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.176200] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.241254] env[63371]: DEBUG nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.242170] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ed36cc-f055-4b80-a129-ea9700c414a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.315291] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1429.435922] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e9af905-e91a-4a80-92f1-4133f37a19bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.446254] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3e43db-1d66-4d62-80f5-f90521d28325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.488537] env[63371]: DEBUG nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Detach interface failed, port_id=9a4b63df-9697-47a1-81ad-c69476a80975, reason: Instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1429.523203] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773810, 'name': PowerOnVM_Task, 'duration_secs': 0.500304} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.523484] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.523678] env[63371]: INFO nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 7.93 seconds to spawn the instance on the hypervisor. [ 1429.523917] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.524680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a282db-2f3a-4447-9127-3465da6312b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.528805] env[63371]: INFO nova.compute.manager [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 1.58 seconds to deallocate network for instance. [ 1429.675318] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070698} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.675599] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1429.676370] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fdefb7-223a-4612-94dc-f8bbe0b5aa80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.698472] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1429.698872] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a76d851-d4a9-41e8-8c2e-58e241f408fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.717975] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1429.717975] env[63371]: value = "task-1773812" [ 1429.717975] env[63371]: _type = "Task" [ 1429.717975] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.725779] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.753102] env[63371]: INFO nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] instance snapshotting [ 1429.755796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a85650-4de6-48c1-9d93-5709e6d647f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.776088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cccf12-c23c-46b4-9969-8c91401322e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.818165] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1430.042103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.044604] env[63371]: INFO nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 41.73 seconds to build instance. [ 1430.228401] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.287936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1430.288407] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-05d380b6-4aee-429e-ba1d-0424503f2216 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.295898] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1430.295898] env[63371]: value = "task-1773813" [ 1430.295898] env[63371]: _type = "Task" [ 1430.295898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.304614] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.320748] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1430.321150] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1430.321223] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1430.547591] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.950s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.607143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.607508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.607823] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.608082] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.608305] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.610764] 
env[63371]: INFO nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Terminating instance [ 1430.612913] env[63371]: DEBUG nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1430.613173] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.614210] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5eb2fb-cf95-46be-8dba-c1bda5bb8eb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.625011] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.625011] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e07793a-174a-49f6-8880-2c544d7b15c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.633706] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1430.633706] env[63371]: value = "task-1773814" [ 1430.633706] env[63371]: _type = "Task" [ 1430.633706] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.642619] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773814, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.730737] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task, 'duration_secs': 0.580319} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.731044] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1430.731702] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30042906-4bfb-4f7e-aaf3-430a16d4c92f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.737801] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1430.737801] env[63371]: value = "task-1773815" [ 1430.737801] env[63371]: _type = "Task" [ 1430.737801] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.748928] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773815, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.805825] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.845806] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8ca50c-9537-462c-9dcd-ed4459c63f3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.855331] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783ae952-2466-435a-8294-c8f7d4f1ee51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.888297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af922a8-65ee-4225-8ea8-448e542f317c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.896474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0754d8f2-d367-40f1-88f3-40838d2f6c4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.911936] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.050747] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1431.144320] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773814, 'name': PowerOffVM_Task, 'duration_secs': 0.184391} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.144592] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.144761] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.145013] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f2fae52-4aa4-455e-826f-db8199aa4e41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.223115] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.223389] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.223575] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleting the datastore file [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.223837] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c741e09-f63e-44f6-a4d2-bcf5a4150629 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.230659] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1431.230659] env[63371]: value = "task-1773817" [ 1431.230659] env[63371]: _type = "Task" [ 1431.230659] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.238551] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.246684] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773815, 'name': Rename_Task, 'duration_secs': 0.133162} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.246947] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1431.247201] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea562062-0fe3-4196-8bcd-b8dc9616178d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.254248] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1431.254248] env[63371]: value = "task-1773818" [ 1431.254248] env[63371]: _type = "Task" [ 1431.254248] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.268253] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.309508] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task, 'duration_secs': 0.646305} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.310221] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1431.310626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9da2ab5-77f7-418f-85f1-f478d654f290 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.415197] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1431.574907] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.743291] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415705} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.743706] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.744010] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.744307] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.744576] env[63371]: INFO nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1431.744943] env[63371]: DEBUG oslo.service.loopingcall [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.745247] env[63371]: DEBUG nova.compute.manager [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1431.745399] env[63371]: DEBUG nova.network.neutron [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.764198] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.829176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1431.829511] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-284e07f6-9f27-4ee5-b038-cc1f957a7ef0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.838811] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1431.838811] env[63371]: value = "task-1773819" [ 1431.838811] env[63371]: _type = "Task" [ 1431.838811] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.847699] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.920764] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1431.921230] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.747s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.921506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.931s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.923185] env[63371]: INFO nova.compute.claims [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1431.925994] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.926300] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 
1432.045364] env[63371]: DEBUG nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-vif-deleted-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1432.046292] env[63371]: INFO nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Neutron deleted interface 292614c6-49c4-4096-afda-debce88edee1; detaching it from the instance and deleting it from the info cache [ 1432.046517] env[63371]: DEBUG nova.network.neutron [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.264869] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task, 'duration_secs': 0.589685} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.265115] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1432.265322] env[63371]: INFO nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 9.54 seconds to spawn the instance on the hypervisor. [ 1432.265500] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1432.266675] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6432ad-713d-4411-b3c0-e77b29977e61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.349534] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.433227] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 10 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1432.433490] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1432.524904] env[63371]: DEBUG nova.network.neutron [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.549345] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7da72c05-8059-42af-b458-c321d2f4be20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.558738] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb4649-ef1f-4e9d-a995-1b9dfb6cf3b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.593373] env[63371]: DEBUG nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Detach interface failed, port_id=292614c6-49c4-4096-afda-debce88edee1, reason: Instance 9249f27a-1985-4be1-947c-e433c7aa26f1 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1432.783212] env[63371]: INFO nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 43.19 seconds to build instance. [ 1432.850550] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.939614] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.028247] env[63371]: INFO nova.compute.manager [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 1.28 seconds to deallocate network for instance. 
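The entries above repeat the same request/poll cycle: a VMware "*_Task" method (PowerOffVM_Task, Rename_Task, DeleteDatastoreFile_Task, CloneVM_Task) is invoked through oslo.vmware, the returned task reference (e.g. task-1773814) is logged, and wait_for_task polls it until vCenter reports completion ("_poll_task ... progress is N%"). The following is a minimal illustrative sketch of that pattern using the public oslo.vmware session API, not the exact Nova code path; the vCenter endpoint, credentials, poll interval and the vm_ref lookup are placeholders, not values taken from this log.

    # Illustrative sketch only: the invoke-then-poll cycle visible in the log
    # entries around PowerOffVM_Task, expressed with the public oslo.vmware API.
    from oslo_vmware import api


    def power_off_vm(session, vm_ref):
        """Invoke PowerOffVM_Task and block until vCenter reports completion."""
        # invoke_api() issues the SOAP call; the return value is a Task
        # managed-object reference, analogous to the 'task-1773814' ids above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" entries) and
        # raises if the task finishes in an error state.
        return session.wait_for_task(task)


    if __name__ == '__main__':
        # Placeholder endpoint and credentials; constructing the session logs in
        # to vCenter, as in the "_create_session" entries earlier in this log.
        session = api.VMwareAPISession(
            'vc.example.test',
            'administrator@vsphere.local',
            'secret',
            api_retry_count=3,
            task_poll_interval=1.0)
        # Obtaining vm_ref (e.g. via SearchIndex.FindAllByUuid, also visible in
        # this log) is deployment-specific and omitted here.
        # power_off_vm(session, vm_ref)
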
[ 1433.284744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.558s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.349506] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.391898] env[63371]: INFO nova.compute.manager [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Rescuing [ 1433.392180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.392341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.392501] env[63371]: DEBUG nova.network.neutron [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1433.418440] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064599b0-3006-42e7-ba00-e25b7c03faaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.427373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7194cb25-b9db-4012-93d8-b877e33af114 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.457039] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.459561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc245636-d5fe-4e96-8d56-2e342ce0b356 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.467041] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b69ed3-1ed6-4bbb-b39c-9288f3c8c816 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.480375] env[63371]: DEBUG nova.compute.provider_tree [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.534965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.789526] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1433.850869] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task, 'duration_secs': 1.645343} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.851170] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created linked-clone VM from snapshot [ 1433.851926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53084246-d033-4002-b28c-98f274da9a86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.859448] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Uploading image 40f4ba8c-3aff-4162-89c5-27a0765d4f79 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1433.881805] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1433.881805] env[63371]: value = "vm-368292" [ 1433.881805] env[63371]: _type = "VirtualMachine" [ 1433.881805] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1433.882101] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d46c4c5e-98e9-44d3-9db6-f4283fb92981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.889621] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1433.889621] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.889621] env[63371]: _type = "HttpNfcLease" [ 1433.889621] env[63371]: } obtained for exporting VM: (result){ [ 1433.889621] env[63371]: value = "vm-368292" [ 1433.889621] env[63371]: _type = "VirtualMachine" [ 1433.889621] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1433.889621] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1433.889621] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.889621] env[63371]: _type = "HttpNfcLease" [ 1433.889621] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1433.897604] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1433.897604] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.897604] env[63371]: _type = "HttpNfcLease" [ 1433.897604] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1433.963351] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.984585] env[63371]: DEBUG nova.scheduler.client.report [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1434.121923] env[63371]: DEBUG nova.network.neutron [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.313735] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.398289] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1434.398289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1434.398289] env[63371]: _type = "HttpNfcLease" [ 1434.398289] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1434.398652] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1434.398652] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1434.398652] env[63371]: _type = "HttpNfcLease" [ 1434.398652] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1434.399521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2494d5-12ab-4db8-8dba-1e80efc6b51c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.407756] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1434.407986] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1434.466901] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1434.491504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.492049] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1434.495416] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.467s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.495597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.498083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.728s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.499542] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.509775] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-22cde28f-ef68-4632-ad6f-b46248d14cf7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.522818] env[63371]: INFO nova.scheduler.client.report [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleted allocations for instance af1281ba-c3be-43b4-a039-86d94bd9efe4 [ 1434.624960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.971901] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1435.004107] env[63371]: DEBUG nova.compute.utils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1435.007728] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1435.007899] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1435.034943] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.476s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.049237] env[63371]: DEBUG nova.policy [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '338e5dcf03fa465484fbc3c9cf1ccd83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '941e719c77a84e8d8fe0107968a0f527', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1435.163747] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.163747] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-217825c4-82ab-4bf1-8890-ffcae84a113f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.172217] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.172217] env[63371]: value = "task-1773821" [ 1435.172217] env[63371]: _type = "Task" [ 1435.172217] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.181841] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.474752] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1435.514357] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1435.583911] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Successfully created port: 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.683443] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773821, 'name': PowerOffVM_Task, 'duration_secs': 0.200102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.686243] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1435.687297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4754b9c1-e9ce-4f45-907e-9de88bdaebd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.716333] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8cfdb9-d647-49bf-83a4-07b30b35c896 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.754156] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.756154] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e7806ea-0d7f-414a-a1f6-48cda3319b7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.762773] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.762773] env[63371]: value = "task-1773822" [ 1435.762773] env[63371]: _type = "Task" [ 1435.762773] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.775486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1435.775793] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1435.776674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.776674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.776674] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.776674] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eb9c6e8-560a-4099-8f35-b68dc8290b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.788271] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.788496] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1435.789438] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b714b56-8b65-4ef9-9c31-0089755c509e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.795296] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.795296] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c" [ 1435.795296] env[63371]: _type = "Task" [ 1435.795296] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.809176] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.982038] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1436.150990] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212aa88e-fbf3-411e-b698-889c546d2572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.163198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca015d3e-3530-49cd-ad62-1074050e8862 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.197022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedf0bf5-575e-456b-a5ac-68f97509b372 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.204941] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e22940d-e55c-419e-a9c4-73ca150013f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.221545] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.307775] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.485753] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1436.524389] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.563458] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1436.563627] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.563822] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1436.564062] env[63371]: DEBUG 
nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1436.564242] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1436.564411] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1436.564571] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1436.564741] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1436.565674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69de49b-2027-4b17-88c3-765c2e45c9fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.583367] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d607497-5ed6-4b50-8a58-62348541ccfa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.725862] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1436.810039] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task, 'duration_secs': 0.567599} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.810039] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cbc0b05-ae8d-4462-a22e-ae0321343663 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.816637] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1436.816637] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793" [ 1436.816637] env[63371]: _type = "Task" [ 1436.816637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.825927] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.990293] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1437.077948] env[63371]: DEBUG nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.078242] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.078530] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.078699] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.078866] env[63371]: DEBUG nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] No waiting events found dispatching network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1437.079040] env[63371]: WARNING nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received unexpected event network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 for instance with vm_state building and task_state spawning. [ 1437.220414] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Successfully updated port: 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1437.232228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.232845] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1437.235882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.243s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.239025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.239025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.750s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.240213] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.276710] env[63371]: INFO nova.scheduler.client.report [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleted allocations for instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 [ 1437.329318] env[63371]: DEBUG 
oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793, 'name': SearchDatastore_Task, 'duration_secs': 0.334403} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.329599] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.330092] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1437.330377] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c0c13e5-d7f8-4737-b76b-68116c11da09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.341384] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1437.341384] env[63371]: value = "task-1773823" [ 1437.341384] env[63371]: _type = "Task" [ 1437.341384] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.349769] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.492100] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.492394] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1437.721515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.721725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.721987] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.737717] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.739108] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1437.739569] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1437.788384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.938s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.817484] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1437.853171] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.994789] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.169906] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Successfully created port: e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.246723] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1438.273226] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1438.355289] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.975449} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.355571] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1438.356413] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ea51a-7379-4bf7-8825-9524bcb3d0f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.393475] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.397598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc2873d-2ded-46e3-9e4c-15815571fee1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.419907] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1438.419907] env[63371]: value = "task-1773824" [ 1438.419907] env[63371]: _type = "Task" [ 1438.419907] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.440593] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773824, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.662197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.662197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.664319] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.922464] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5af7c9c-9fb2-410a-afb2-a7c8eeb27051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.937323] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773824, 'name': ReconfigVM_Task, 'duration_secs': 0.513661} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.938524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61debdc7-43aa-4a99-96ac-0496bc197608 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.942329] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.943630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93a0549-2794-44df-ba07-fc9216b314a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.996861] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405087bd-53b3-42af-8f5c-1659d20e6003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.008286] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4ffe43-d37d-4e0f-b1ee-81de9e1f236d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.018864] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4153e71-6d7f-4351-9524-28b22b707223 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.023311] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1439.023311] env[63371]: value = "task-1773825" [ 1439.023311] env[63371]: _type = "Task" [ 1439.023311] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.036267] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.041044] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773825, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.168715] env[63371]: DEBUG nova.compute.utils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1439.170408] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.172046] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance network_info: |[{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1439.172179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:de:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffcecdaa-a7b8-49fc-9371-dbdb7744688e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d08ea03-4a7c-43bc-9a11-db1f92c6c505', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1439.184029] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating folder: Project (941e719c77a84e8d8fe0107968a0f527). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1439.184029] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f98eb8e7-d91c-42bd-9c9b-02e0280c9cf1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.198570] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created folder: Project (941e719c77a84e8d8fe0107968a0f527) in parent group-v368199. [ 1439.198570] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating folder: Instances. Parent ref: group-v368293. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1439.198570] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-faabab78-8e20-4cbd-9e5e-7b12973b7e42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.211439] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created folder: Instances in parent group-v368293. [ 1439.211740] env[63371]: DEBUG oslo.service.loopingcall [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1439.213200] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1439.213200] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e57d2ef-4e9b-4aac-ae68-a49e33fe6287 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.234571] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1439.234571] env[63371]: value = "task-1773828" [ 1439.234571] env[63371]: _type = "Task" [ 1439.234571] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.248808] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773828, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.264117] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1439.296706] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1439.297023] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1439.297583] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.297810] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1439.298076] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.298237] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1439.298451] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1439.298607] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1439.298805] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1439.299144] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1439.299358] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1439.301238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43824c43-bfaa-40b9-8f38-f5dac472f309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.311732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9827f8-642e-4273-8392-03caec6fd36b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.482508] env[63371]: DEBUG nova.compute.manager [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1439.482745] env[63371]: DEBUG nova.compute.manager [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing instance network info cache due to event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1439.482957] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.483226] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.483406] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.497121] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.498099] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.536325] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773825, 'name': ReconfigVM_Task, 'duration_secs': 0.310548} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.536480] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.536706] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-614ca51c-e30b-40ff-a633-7c45f7b75ec7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.542967] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1439.550801] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1439.550801] env[63371]: value = "task-1773829" [ 1439.550801] env[63371]: _type = "Task" [ 1439.550801] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.561742] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.672875] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.750233] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773828, 'name': CreateVM_Task, 'duration_secs': 0.494554} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.750432] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.751601] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.751770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.752171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1439.752455] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8aae3ee-d2e8-4988-9675-3e8a09c9eb49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.760422] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1439.760422] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759" [ 1439.760422] env[63371]: _type = "Task" [ 1439.760422] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.771894] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.012040] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.012298] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1440.012420] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1440.050544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.812s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.051086] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1440.056903] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.619s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.061346] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.063456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.435s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.064984] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1440.087241] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] 
Task: {'id': task-1773829, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.123595] env[63371]: INFO nova.scheduler.client.report [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleted allocations for instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a [ 1440.279057] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759, 'name': SearchDatastore_Task, 'duration_secs': 0.013948} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.279915] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Successfully updated port: e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.282744] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.282883] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.282883] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4002938b-98e8-4454-9fc0-4297b23ed862 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.297564] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.298122] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1440.299298] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73af1f01-d624-486f-9a4d-01c5dd4028dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.309021] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.309021] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6aa72-4bec-1651-7cdb-82cb05b1d260" [ 1440.309021] env[63371]: _type = "Task" [ 1440.309021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.325640] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6aa72-4bec-1651-7cdb-82cb05b1d260, 'name': SearchDatastore_Task, 'duration_secs': 0.013208} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.329344] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06f593a9-e194-4982-ad9b-397366cdd1a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.344059] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.344059] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4" [ 1440.344059] env[63371]: _type = "Task" [ 1440.344059] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.356226] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.376874] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updated VIF entry in instance network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.377247] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Skipping network cache update for instance because it is Building. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.538965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.538965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.538965] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1440.538965] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.557639] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1440.561683] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1440.562179] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1440.572745] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773829, 'name': PowerOnVM_Task, 'duration_secs': 0.59941} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.573388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.576310] env[63371]: DEBUG nova.compute.manager [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.580113] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fa30e8-1837-465f-b53b-9662e19fe141 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.634021] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1440.639147] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.269s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.784597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.784886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.784931] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.806322] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.806322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.806322] env[63371]: INFO nova.compute.manager [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attaching volume fd8f0908-509b-4986-8eae-d6db5f10b561 to /dev/sdb [ 1440.853488] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c237f4b1-d2ba-45e9-98da-e0e4d50d8e6b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.866515] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4, 'name': SearchDatastore_Task, 'duration_secs': 0.015461} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.869077] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.869395] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.869804] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16df55a6-6ba9-4555-b70c-2a048c16fa2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.873123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21692c40-ba65-4e0b-a2cb-3dec0b3ffede {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.881217] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.884376] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.884376] env[63371]: value = "task-1773830" [ 1440.884376] env[63371]: _type = "Task" [ 1440.884376] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.900927] env[63371]: DEBUG nova.virt.block_device [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating existing volume attachment record: 71bc6b72-7d4c-4e64-9e3b-5d56e88566ea {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1440.903385] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.065644] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1441.267683] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Successfully created port: d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1441.372133] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.396872] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.597298] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.602924] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.603157] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.603358] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.603522] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.603679] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] No waiting events found dispatching network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1441.603830] env[63371]: WARNING nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received unexpected event network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d for instance with vm_state building and task_state spawning. [ 1441.603991] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-changed-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.604290] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Refreshing instance network info cache due to event network-changed-e450db3c-69a8-4e46-817e-eb4d6310fb9d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1441.604476] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquiring lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.742438] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce673f2-34ad-4524-830c-0f456e8635df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.751754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b90fb52-8e41-43d7-bddf-cc502bcda3c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.789025] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.789025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066686d5-4a65-49d1-8fc4-fd8e4ac79693 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.798089] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090222db-3c29-4b1e-b38d-c1f1414ec69c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.815756] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.903731] env[63371]: DEBUG oslo_vmware.api [None 
req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628335} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.903989] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.904270] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.904580] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fc57df6-4692-4b50-be57-7c489029ab58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.913730] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1441.913730] env[63371]: value = "task-1773834" [ 1441.913730] env[63371]: _type = "Task" [ 1441.913730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.928552] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773834, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.080703] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1442.109771] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.109996] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.110167] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.110354] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.110503] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.110649] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.110862] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.111048] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.111369] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.111454] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.111677] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.113174] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b482f69-f3e4-43da-be76-bfb1c006a034 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.122431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fda759-5d1c-485e-8e25-c435c9af5dab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.292107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.292526] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance network_info: |[{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1442.292846] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquired lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.293033] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Refreshing network info cache for port e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.298620] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:f5:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e450db3c-69a8-4e46-817e-eb4d6310fb9d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1442.313929] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating folder: Project (3cd0696c715249779160762b8ecd83e6). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.318632] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fc283f3-462a-4cfe-af57-da9677687c68 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.322381] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.341774] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created folder: Project (3cd0696c715249779160762b8ecd83e6) in parent group-v368199. [ 1442.342032] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating folder: Instances. Parent ref: group-v368299. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.342328] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70631bdd-3d9e-4d9e-8109-bead086b9caa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.357306] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created folder: Instances in parent group-v368299. [ 1442.357700] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.357924] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1442.358365] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa494e71-05c7-439d-b21b-97e3e2ac558b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.382169] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1442.382169] env[63371]: value = "task-1773837" [ 1442.382169] env[63371]: _type = "Task" [ 1442.382169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.397550] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.429827] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081263} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.430302] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1442.431516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bc494c-6ec3-4495-8c21-65332c13df24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.455771] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1442.456125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18431aa5-f976-4ac9-bed7-d8c47a7ab538 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.480459] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1442.480459] env[63371]: value = "task-1773838" [ 1442.480459] env[63371]: _type = "Task" [ 1442.480459] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.491610] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773838, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.574593] env[63371]: INFO nova.compute.manager [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unrescuing [ 1442.575199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.575199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.575324] env[63371]: DEBUG nova.network.neutron [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1442.602638] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.829118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.829118] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1442.833290] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.178s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.833458] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.836354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.887s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.842784] env[63371]: INFO nova.compute.claims [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.876335] env[63371]: INFO nova.scheduler.client.report [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted allocations for instance cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 [ 1442.902788] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.974331] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updated VIF entry in instance network info cache for port e450db3c-69a8-4e46-817e-eb4d6310fb9d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.974712] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.996381] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773838, 'name': ReconfigVM_Task, 'duration_secs': 0.367884} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.997958] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.997958] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c62e2a16-f661-4269-b0b3-516d7a10fb96 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.006799] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1443.006799] env[63371]: value = "task-1773839" [ 1443.006799] env[63371]: _type = "Task" [ 1443.006799] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.021025] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773839, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.110545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.110545] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.346220] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.353999] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1443.356469] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1443.393245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.404263] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.481192] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Releasing lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.522031] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773839, 'name': Rename_Task, 'duration_secs': 0.204079} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.522172] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.522374] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abde7d01-595a-4200-afd2-de09c2fbaf62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.533356] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.535966] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1443.535966] env[63371]: value = "task-1773841" [ 1443.535966] env[63371]: _type = "Task" [ 1443.535966] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.547265] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.615331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.807396] env[63371]: DEBUG nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.808632] env[63371]: DEBUG nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] No waiting events found dispatching network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.808632] env[63371]: WARNING nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received unexpected event network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 for instance with vm_state building and task_state spawning. [ 1443.824371] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1443.826123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b83a874-2b65-4177-afd5-aeace9bbdbda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.836322] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1443.836322] env[63371]: ERROR oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk due to incomplete transfer. [ 1443.836650] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-39d9819a-7600-48ce-9481-67e9324e832b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.845241] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1443.845519] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Uploaded image 40f4ba8c-3aff-4162-89c5-27a0765d4f79 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1443.847674] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1443.848300] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-089ebcbc-1c7c-4467-89a2-dcdcca6395dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.853965] env[63371]: DEBUG nova.network.neutron [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.856283] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1443.860320] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1443.860320] env[63371]: value = "task-1773842" [ 1443.860320] env[63371]: _type = "Task" [ 1443.860320] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.876233] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.902108] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task, 'duration_secs': 1.100084} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.902303] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1443.903197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.903456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.903698] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1443.904304] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4163bac7-af39-4d18-a2fe-c784ff80d942 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.912681] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1443.912681] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23" [ 1443.912681] env[63371]: _type = "Task" [ 1443.912681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.921733] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.049878] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.147531] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Successfully updated port: d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.317928] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Successfully created port: 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1444.329884] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.330172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.355650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.357496] env[63371]: DEBUG nova.objects.instance [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lazy-loading 'flavor' on Instance uuid b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.376238] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.425703] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23, 'name': SearchDatastore_Task, 'duration_secs': 0.014635} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.429535] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.429813] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1444.430064] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.430302] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.430481] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1444.431396] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b7fad1-3b44-4738-9a90-81aa1d7669c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.446376] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1444.446376] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1444.448046] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-939063f3-c3a1-4b00-b3ee-6b313b4c2ec0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.454853] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1444.454853] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244" [ 1444.454853] env[63371]: _type = "Task" [ 1444.454853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.468570] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.508610] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a807119-41cd-499e-860a-e164376762c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.518606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b500b0-f3d3-4cc9-9528-d0f38e2bdec5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.568636] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b0edd-1576-452f-819a-1475f235224b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.584019] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task, 'duration_secs': 0.931959} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.584549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.584656] env[63371]: INFO nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 8.06 seconds to spawn the instance on the hypervisor. 
[ 1444.584834] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.586193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f09d57-2e05-4262-803c-7f152f611715 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.591687] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c843b0-b449-4ead-a316-e45f0104b34c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.610275] env[63371]: DEBUG nova.compute.provider_tree [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.650014] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.650211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.650373] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.863471] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18da847d-27f8-4471-8945-7e0e5b83fce5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.870419] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1444.883962] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.908235] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1444.908936] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc964641-11f0-4b78-9d3e-ece50123b444 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.918058] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1444.918058] env[63371]: value = "task-1773843" [ 1444.918058] env[63371]: _type = "Task" [ 1444.918058] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.920241] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.920503] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.920675] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.920877] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.921040] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1444.921194] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.921397] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.921553] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.921712] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.923197] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.923197] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.923197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3464eda-8716-44cf-a2c7-72010fe959fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.936797] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.938139] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee660c4-747c-444b-a25f-8916c0302fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.965602] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244, 'name': SearchDatastore_Task, 'duration_secs': 0.018315} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.966089] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa11a9f4-e693-4e0a-b076-a36ba517666c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.972999] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1444.972999] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf" [ 1444.972999] env[63371]: _type = "Task" [ 1444.972999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.982634] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.116202] env[63371]: DEBUG nova.scheduler.client.report [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.129744] env[63371]: INFO nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 50.17 seconds to build instance. [ 1445.197181] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1445.380880] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task, 'duration_secs': 1.297574} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.381300] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroyed the VM [ 1445.381629] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1445.382493] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4cacd534-8fc9-430a-8074-43bb323b28e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.388105] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.394837] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1445.394837] env[63371]: value = "task-1773844" [ 1445.394837] env[63371]: _type = "Task" [ 1445.394837] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.406439] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.433136] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773843, 'name': PowerOffVM_Task, 'duration_secs': 0.23397} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.433431] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.438826] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1445.439123] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0195a87e-c4de-49d4-89d0-e1657297fb7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.462732] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1445.462732] env[63371]: value = "task-1773845" [ 1445.462732] env[63371]: _type = "Task" [ 1445.462732] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.464041] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1445.464041] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1445.464881] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0775f3e-d942-442c-86a0-6ca8ec9de675 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.493438] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773845, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.494288] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d60bab6-d595-469b-9ba7-83c1c2cc654b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.507036] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf, 'name': SearchDatastore_Task, 'duration_secs': 0.024268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.523409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.523746] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1445.532796] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.533426] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ef7679f-c1e2-4660-b8cf-96128ad2e611 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.535728] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405e5696-6bbc-4eae-a3dd-f1d2681d1b62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.558875] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1445.558875] env[63371]: value = "task-1773846" [ 1445.558875] env[63371]: _type = "Task" [ 1445.558875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.561036] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1445.561036] env[63371]: value = "task-1773847" [ 1445.561036] env[63371]: _type = "Task" [ 1445.561036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.577516] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.577516] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.632817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.410s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.634108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.634578] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1445.638359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.214s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.638560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.640800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.597s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.643958] env[63371]: INFO nova.compute.claims [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1445.680508] env[63371]: INFO nova.scheduler.client.report [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a 
tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted allocations for instance 852e14a7-2f9f-421c-9804-56c885885c7d [ 1445.896118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.896118] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance network_info: |[{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1445.896592] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:22:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4ac9418-864a-4adf-ab92-bb5c3dbb8ec9', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.906912] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.908242] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.911328] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7edeb84b-4546-44cf-afe8-6d4e5b2283ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.930327] env[63371]: DEBUG nova.compute.manager [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-changed-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1445.930531] env[63371]: DEBUG nova.compute.manager [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Refreshing instance network info cache due to event network-changed-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1445.931306] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Acquiring lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.931306] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Acquired lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.931306] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Refreshing network info cache for port d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.939372] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.943021] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.943021] env[63371]: value = "task-1773848" [ 1445.943021] env[63371]: _type = "Task" [ 1445.943021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.952537] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.974254] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773845, 'name': ReconfigVM_Task, 'duration_secs': 0.262068} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.974540] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1445.974716] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.975399] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36931e09-261e-4613-ae42-7c5d664baa97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.983778] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1445.983778] env[63371]: value = "task-1773849" [ 1445.983778] env[63371]: _type = "Task" [ 1445.983778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.001032] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.076295] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.081257] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.147614] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1446.153891] env[63371]: DEBUG nova.compute.utils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.153891] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1446.153891] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.192340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.944s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.283872] env[63371]: DEBUG nova.policy [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.426778] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task, 'duration_secs': 0.907456} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.428983] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1446.428983] env[63371]: INFO nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 16.67 seconds to snapshot the instance on the hypervisor. 
[ 1446.461585] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.495437] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.579392] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719697} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.579829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1446.580324] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1446.580788] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task, 'duration_secs': 0.630077} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.581131] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62b3e9ba-95f3-4b28-9a1b-1c329aee767d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.583296] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to attach disk [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.589065] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-209b7b5e-44a9-41aa-b048-dd5d9398c76a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.619052] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1446.619052] env[63371]: value = "task-1773851" [ 1446.619052] env[63371]: _type = "Task" [ 1446.619052] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.621785] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1446.621785] env[63371]: value = "task-1773850" [ 1446.621785] env[63371]: _type = "Task" [ 1446.621785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.637586] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.641715] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.661826] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1446.685167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.857273] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Successfully updated port: 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1446.956889] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.006627] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task, 'duration_secs': 0.725611} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.007288] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1447.007288] env[63371]: DEBUG nova.compute.manager [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1447.010872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb3e24c-f7c8-47d5-9509-875e55f8eec3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.142958] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773851, 'name': ReconfigVM_Task, 'duration_secs': 0.194442} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.146018] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117761} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.146018] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1447.146018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1447.146838] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8d8812-72e5-4639-9606-bc41ff030508 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.153714] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updated VIF entry in instance network info cache for port d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1447.154339] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.182446] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1447.185908] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8de1ceea-d3b9-48c3-b541-30ab1d73c673 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.215956] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1447.215956] env[63371]: value = "task-1773852" [ 1447.215956] env[63371]: _type = "Task" [ 1447.215956] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.226706] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773852, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.235022] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Successfully created port: e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.361991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.361991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.361991] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1447.380018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d9b394-0ed1-4c3a-885b-b09dceea50d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.389634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d02c22-5573-4eed-b4b4-a29b05519d5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.431557] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b42c7-da87-4074-8373-5eec031af5dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.443961] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7fabcb-f7fa-4890-a1db-6e5909d4810e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.458992] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.467923] env[63371]: DEBUG nova.compute.provider_tree [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.658532] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Releasing lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.687672] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1447.732301] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773852, 'name': ReconfigVM_Task, 'duration_secs': 0.442157} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.735283] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Reconfigured VM instance instance-00000023 to attach disk [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1447.736160] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-964b3210-3e95-43a0-80fb-827f9c181cae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.744873] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.745135] env[63371]: DEBUG nova.virt.hardware [None 
req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.745260] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.745434] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.745626] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.745714] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.745956] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.746112] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.746292] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.746456] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.746626] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.747552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec38edac-578e-4cd8-938e-f07bd56d3721 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.754308] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1447.754308] env[63371]: value = "task-1773853" [ 1447.754308] env[63371]: _type = "Task" [ 1447.754308] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.765395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d4315-c85d-42bc-8b56-f021074d1056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.774117] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773853, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.905014] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1447.962021] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.972555] env[63371]: DEBUG nova.scheduler.client.report [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1448.109459] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.238814] env[63371]: DEBUG nova.objects.instance [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lazy-loading 'flavor' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.248515] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.248515] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] No waiting events found dispatching network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.248770] env[63371]: WARNING nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received unexpected event network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 for instance with vm_state building and task_state spawning. 
[ 1448.248770] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.248770] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing instance network info cache due to event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1448.248770] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.248770] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.248954] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.264516] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773853, 'name': Rename_Task, 'duration_secs': 0.174556} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.265130] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1448.265473] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f69eefce-fff5-4555-999e-406e89607884 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.278744] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.278744] env[63371]: value = "task-1773854" [ 1448.278744] env[63371]: _type = "Task" [ 1448.278744] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.289398] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.460730] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task, 'duration_secs': 2.088981} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.461893] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.461893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.463411] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.465526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.465526] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b96898e-05c6-4433-9135-e958113eac53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.472066] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.472066] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5222c44c-3750-657e-c552-313f94e4b67f" [ 1448.472066] env[63371]: _type = "Task" [ 1448.472066] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.478254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.837s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.478760] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1448.486297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.783s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.487874] env[63371]: INFO nova.compute.claims [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.508440] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.508440] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5222c44c-3750-657e-c552-313f94e4b67f, 'name': SearchDatastore_Task, 'duration_secs': 0.013878} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.508574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.508739] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1448.508950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.509111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.509317] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.509858] env[63371]: INFO nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Terminating instance [ 1448.514427] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-504125f7-855b-4b8f-b9d5-5edddbbb1369 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.518069] 
env[63371]: DEBUG nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1448.519041] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.519231] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77edf355-cd23-4a41-bc4e-e28f6863e15a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.528258] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.528532] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09972cc8-a1ca-440e-8a90-a9416875c646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.531335] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.532860] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1448.532860] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef51b12a-bba3-4133-8057-449e77801e4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.540457] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.540457] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5284158b-81f6-a2cf-351e-2d67020d4835" [ 1448.540457] env[63371]: _type = "Task" [ 1448.540457] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.540802] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1448.540802] env[63371]: value = "task-1773855" [ 1448.540802] env[63371]: _type = "Task" [ 1448.540802] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.555415] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773855, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.560633] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5284158b-81f6-a2cf-351e-2d67020d4835, 'name': SearchDatastore_Task, 'duration_secs': 0.012408} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.561827] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56c8974f-f9f3-43c7-9652-09e5678d76d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.570807] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.570807] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638" [ 1448.570807] env[63371]: _type = "Task" [ 1448.570807] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.581539] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.612639] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.613141] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance network_info: |[{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1448.614043] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:fa:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '767b2818-8eb3-4f76-8def-793f9f31a087', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1448.621500] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.621960] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1448.621960] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90b31ea5-4f6e-4dc8-9f03-e64f467ae279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.647834] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1448.647834] env[63371]: value = "task-1773856" [ 1448.647834] env[63371]: _type = "Task" [ 1448.647834] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.659412] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.746128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.942s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.793766] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.885361] env[63371]: DEBUG nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1448.886379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee5d03b-cff4-4c76-a813-e14342b7e86f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.000857] env[63371]: DEBUG nova.compute.utils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1449.001174] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Not allocating networking since 'none' was specified. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1449.055275] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773855, 'name': PowerOffVM_Task, 'duration_secs': 0.205691} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.055578] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.056463] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1449.056463] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bbee63a-6a58-4a8e-bea0-023492208035 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.083128] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638, 'name': SearchDatastore_Task, 'duration_secs': 0.011996} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.083188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.084166] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.084166] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd7be5b3-8cf3-41a6-9f3b-699a45339673 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.094383] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1449.094383] env[63371]: value = "task-1773858" [ 1449.094383] env[63371]: _type = "Task" [ 1449.094383] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.105887] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773858, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.145681] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.145836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.146028] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleting the datastore file [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.146305] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a266d8c0-9857-438a-84c7-f93059c9ef97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.161168] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1449.161168] env[63371]: value = "task-1773859" [ 1449.161168] env[63371]: _type = "Task" [ 1449.161168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.165091] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.228581] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updated VIF entry in instance network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1449.228951] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.246944] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Successfully updated port: e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.290598] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task, 'duration_secs': 0.519661} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.290875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1449.291091] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 10.03 seconds to spawn the instance on the hypervisor. 
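Several of the records above follow the same shape: an oslo_vmware call returns a task handle (CreateVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task), and the API layer then polls it, logging "progress is N%" until the task reports "completed successfully". The following is a minimal, self-contained sketch of that poll-until-complete pattern, not the actual oslo.vmware implementation; the fetch_task_info callable is a hypothetical stand-in for the vSphere task query.

    import time

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 25}
            if info['state'] == 'success':
                return info                  # corresponds to "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_id, info.get('error')))
            # corresponds to the periodic "_poll_task ... progress is N%" records
            print('Task %s progress is %s%%' % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)
        raise TimeoutError('Task %s did not complete within %ss' % (task_id, timeout))
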
[ 1449.291279] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1449.292119] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353ca6ae-96c2-4413-a405-76a74c259bbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.406032] env[63371]: INFO nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] instance snapshotting [ 1449.408742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1b3fd5-adfc-411e-9af8-637336edb45a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.430859] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b1e3fa-48b1-4dad-8956-256e2fc11819 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.263178] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1450.268421] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.268860] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-changed-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.269052] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Refreshing instance network info cache due to event network-changed-767b2818-8eb3-4f76-8def-793f9f31a087. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1450.269257] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.269443] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquired lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.269606] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Refreshing network info cache for port 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.271153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.271271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.271419] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.276704] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1450.290917] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1fe61b5f-40eb-440d-9fa3-5b7c9aa8a59c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.299982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.299982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.301162] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.301355] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.301554] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.301719] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.301893] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] No waiting events found dispatching network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1450.302193] env[63371]: WARNING nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received unexpected event network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d for instance with vm_state building and task_state spawning. [ 1450.302384] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-changed-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.302552] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Refreshing instance network info cache due to event network-changed-e4eb0664-61b0-40ee-a907-faa96a4e1c4d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1450.302717] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquiring lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.305907] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 52.56 seconds to build instance. [ 1450.316687] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.685923} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.316940] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765926} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.321459] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.321673] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1450.321841] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1450.322184] env[63371]: INFO nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 1.80 seconds to destroy the instance on the hypervisor. [ 1450.322291] env[63371]: DEBUG oslo.service.loopingcall [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.322503] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1450.322694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1450.322988] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1450.322988] env[63371]: value = "task-1773860" [ 1450.322988] env[63371]: _type = "Task" [ 1450.322988] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.324370] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task, 'duration_secs': 0.669174} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.326971] env[63371]: DEBUG nova.compute.manager [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1450.327106] env[63371]: DEBUG nova.network.neutron [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1450.328938] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40062222-d543-4224-b585-c1b9b61de33c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.331359] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1450.335415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.335595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.335877] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1450.337133] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c921b3f-422a-48dc-ad32-67aaa5b86404 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.349059] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.349425] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.349425] env[63371]: value = "task-1773861" [ 1450.349425] env[63371]: _type = "Task" [ 1450.349425] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.349651] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.349651] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df57c5-c098-15ee-b6bd-b2b44558f6fe" [ 1450.349651] env[63371]: _type = "Task" [ 1450.349651] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.363968] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.370385] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df57c5-c098-15ee-b6bd-b2b44558f6fe, 'name': SearchDatastore_Task, 'duration_secs': 0.018143} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.372889] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.373163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1450.373423] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.373579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.373764] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.374429] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f462aaa7-6ee7-42ac-826a-7d19684734a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.391317] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.391572] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1450.397999] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a457687a-d4cf-480a-9a0c-66518f6ff174 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.404029] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.404029] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29" [ 1450.404029] env[63371]: _type = "Task" [ 1450.404029] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.413135] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.805425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.806094] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.810383] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.222s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.825669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94a0cb-009f-4e08-84f9-a350d92f8e84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.840230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e791ed-4b79-4ff1-b371-00ecd562cb5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.852939] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': 
CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.878373] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1450.884958] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe41a9b-5229-4e06-86d8-55c6385110da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.894766] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.20166} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.895968] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafa8871-b408-4dd2-8fa2-8bf95e83a08e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.900424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1450.901395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20483106-ed22-49da-b45f-114716dd155f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.925808] env[63371]: DEBUG nova.compute.provider_tree [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.937748] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1450.943183] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-851db88d-d7e5-4aa6-8fae-fa2f3e0f6088 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.964246] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29, 'name': SearchDatastore_Task, 'duration_secs': 0.038879} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.965964] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-774b7d7e-eaac-4236-a227-39f51002c9d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.971269] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.971269] env[63371]: value = "task-1773862" [ 1450.971269] env[63371]: _type = "Task" [ 1450.971269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.972881] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.972881] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e" [ 1450.972881] env[63371]: _type = "Task" [ 1450.972881] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.985115] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.988335] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.079745] env[63371]: DEBUG nova.network.neutron [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.274386] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1451.289853] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.309705] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 
tempest-ServerDiagnosticsV248Test-383719471-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1451.313393] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1451.313393] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1451.313393] env[63371]: INFO nova.compute.manager [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Detaching volume fd8f0908-509b-4986-8eae-d6db5f10b561 [ 1451.314898] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1451.317981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a534a387-119a-44fc-a8c2-56180c21e8b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.332163] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updated VIF entry in instance network info cache for port 767b2818-8eb3-4f76-8def-793f9f31a087. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.332693] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.336468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b1769-69fa-4634-9154-7e3a3d248888 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.361773] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.362332] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.368694] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating folder: Project (d67736854dbf430f8eae90eb3d8e4bb8). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.369415] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f93e2063-fa89-4f20-b18c-e03c8f759f39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.372553] env[63371]: INFO nova.virt.block_device [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attempting to driver detach volume fd8f0908-509b-4986-8eae-d6db5f10b561 from mountpoint /dev/sdb [ 1451.372891] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1451.372982] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1451.373839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de51d5da-ea29-4c30-8c62-732f736aaf16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.399498] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36723f01-1276-45a8-aaf7-0223faa00e98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.402169] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created folder: Project (d67736854dbf430f8eae90eb3d8e4bb8) in parent 
group-v368199. [ 1451.402169] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating folder: Instances. Parent ref: group-v368304. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.402739] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64366396-191d-4b8b-98f8-e2e43f85e265 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.412322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980c0cc7-a6d1-49fe-b42f-19038e45280d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.417609] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created folder: Instances in parent group-v368304. [ 1451.417945] env[63371]: DEBUG oslo.service.loopingcall [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.418222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.418473] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bbcdf46-1ca4-495d-8c87-b3add02a0af7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.449634] env[63371]: DEBUG nova.scheduler.client.report [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.457235] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1787f780-70f8-4b27-8dc7-9d2c351716e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.459422] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.459422] env[63371]: value = "task-1773865" [ 1451.459422] env[63371]: _type = "Task" [ 1451.459422] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.475451] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] The volume has not been displaced from its original location: [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1451.483286] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1451.488313] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4d72599-0ec0-445c-a937-df8d1e870722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.509091] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773865, 'name': CreateVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.517088] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e, 'name': SearchDatastore_Task, 'duration_secs': 0.02552} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.521109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.521436] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1451.521798] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1451.521798] env[63371]: value = "task-1773866" [ 1451.521798] env[63371]: _type = "Task" [ 1451.521798] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.522087] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.522343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97203504-306b-409b-b966-6d21b1f77743 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.535765] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773866, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.536883] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1451.536883] env[63371]: value = "task-1773867" [ 1451.536883] env[63371]: _type = "Task" [ 1451.536883] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.546879] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.582597] env[63371]: INFO nova.compute.manager [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 1.26 seconds to deallocate network for instance. 
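[Annotation, not part of the captured log] The records above show the driver's basic vCenter call pattern: Folder.CreateFolder / Folder.CreateVM_Task / VirtualDiskManager.CopyVirtualDisk_Task are invoked through oslo.vmware, and task-returning calls are then polled by wait_for_task (the repeated "_poll_task ... progress is N%" lines) until they complete. The following is a minimal illustrative sketch of that pattern using the public oslo.vmware API; the vCenter host, credentials, folder moref, and datastore paths are hypothetical placeholders, not values taken from this log.

    # Sketch only: issue vCenter calls and wait on the returned Task,
    # mirroring the CreateFolder / CopyVirtualDisk_Task records above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical connection parameters (positional args: host, user,
    # password, api_retry_count, task_poll_interval in seconds).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        10, 0.5)

    # Non-task call: CreateFolder returns the new Folder moref directly.
    parent_folder = vim_util.get_moref('group-v368304', 'Folder')  # placeholder moref
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_folder, name='Instances')

    # Task-returning call: CopyVirtualDisk_Task hands back a Task moref;
    # wait_for_task() polls it (the "progress is N%" lines) and returns the
    # task info on success or raises if the task ends in an error state.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/base.vmdk',   # placeholder path
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')        # placeholder path
    task_info = session.wait_for_task(task)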
[ 1451.792681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.793154] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance network_info: |[{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1451.793407] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquired lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.793821] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Refreshing network info cache for port e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.795980] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:53:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4eb0664-61b0-40ee-a907-faa96a4e1c4d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.806301] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating folder: Project (9fb0da840f6847f19f03a1db8a1c3f4f). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.811660] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-612c8247-28f0-4c1e-9b84-25503bd273e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.831404] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created folder: Project (9fb0da840f6847f19f03a1db8a1c3f4f) in parent group-v368199. [ 1451.831807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating folder: Instances. Parent ref: group-v368308. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.832392] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57abe06b-0db8-40d6-81c6-2f9ee23ee2ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.843462] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Releasing lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.852533] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created folder: Instances in parent group-v368308. [ 1451.852533] env[63371]: DEBUG oslo.service.loopingcall [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.852533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.853373] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.854028] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5fe1fcc-ec98-4841-a808-77653526be32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.884039] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task, 'duration_secs': 1.322884} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.884039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1451.884364] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2a9c67-0a93-4c39-8d31-dbc3c248e1d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.893031] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.893031] env[63371]: value = "task-1773870" [ 1451.893031] env[63371]: _type = "Task" [ 1451.893031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.908558] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.958401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.476s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.960039] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1451.962502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.085s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.964053] env[63371]: INFO nova.compute.claims [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.984514] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773865, 'name': CreateVM_Task, 'duration_secs': 0.451299} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.984514] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.985364] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.985594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.985963] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.986310] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3045158-deca-4d3c-966c-040e662a0ec9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.991673] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task, 'duration_secs': 0.649441} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.992423] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Reconfigured VM instance instance-00000024 to attach disk [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1451.993111] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c30209f-b257-4b71-94b4-96dca65031d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.996514] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1451.996514] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad" [ 1451.996514] env[63371]: _type = "Task" [ 1451.996514] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.003388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.003662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.011665] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.011665] env[63371]: value = "task-1773871" [ 1452.011665] env[63371]: _type = "Task" [ 1452.011665] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.012105] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.012735] env[63371]: INFO nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Terminating instance [ 1452.019776] env[63371]: DEBUG nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1452.019776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.020947] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d2600f-dd1a-4a56-ac75-ff9cfe95ff1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.032477] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773871, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.038349] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.042782] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f98643b-82f5-4393-9d3f-51b14395f8fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.050352] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773866, 'name': ReconfigVM_Task, 'duration_secs': 0.36632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.050352] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1452.059972] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7582ab9f-7a01-4153-8298-92a8468734f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.071187] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1452.071187] env[63371]: value = "task-1773872" [ 1452.071187] env[63371]: _type = "Task" [ 1452.071187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.071435] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.084131] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773872, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.088630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.088989] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1452.088989] env[63371]: value = "task-1773873" [ 1452.088989] env[63371]: _type = "Task" [ 1452.088989] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.102023] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773873, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.317349] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updated VIF entry in instance network info cache for port e4eb0664-61b0-40ee-a907-faa96a4e1c4d. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1452.318535] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.327676] env[63371]: DEBUG nova.compute.manager [req-ab28a0ae-1bb9-4ca4-8358-d9200e975921 req-313fd6ed-cfba-4d65-a52b-6b7a5893cad4 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-vif-deleted-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1452.409661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1452.409932] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.410179] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5d4363b0-ad4c-425e-b333-e50efcbaf3b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.419429] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1452.419429] env[63371]: value = "task-1773874" [ 1452.419429] env[63371]: _type = "Task" [ 1452.419429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.429502] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.473524] env[63371]: DEBUG nova.compute.utils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.474600] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1452.474763] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1452.516924] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad, 'name': SearchDatastore_Task, 'duration_secs': 0.057646} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.521435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.522109] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.524015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.525049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.525049] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.529087] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae8f7396-b02e-4b45-bbcf-92a240dcc370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.540106] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773871, 'name': Rename_Task, 'duration_secs': 0.31484} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.542712] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.543673] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85692c0b-c9d3-40b5-b166-225a47232131 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.551785] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.551785] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.552867] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-480ea262-1d83-4894-b7d9-e0112964891b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.561642] env[63371]: DEBUG nova.policy [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc60aa7184b4427291f5766e345bc854', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1771acadeced40a6889b7dfb974e7886', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1452.563794] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68683} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.565804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1452.566144] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1452.566901] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.566901] env[63371]: value = "task-1773875" [ 1452.566901] env[63371]: _type = "Task" [ 1452.566901] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.567122] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3df48c2b-a20f-4e2b-b35f-8c0444ba24ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.574714] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.574714] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523a4ec3-ae5c-0064-631d-96a08cd37864" [ 1452.574714] env[63371]: _type = "Task" [ 1452.574714] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.589577] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.590282] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.590282] env[63371]: value = "task-1773876" [ 1452.590282] env[63371]: _type = "Task" [ 1452.590282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.608124] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773872, 'name': PowerOffVM_Task, 'duration_secs': 0.398223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.608898] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523a4ec3-ae5c-0064-631d-96a08cd37864, 'name': SearchDatastore_Task, 'duration_secs': 0.021331} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.610187] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1452.610345] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1452.611682] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bfc5807-c5ef-47d3-b41d-b85769745f23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.613041] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6d5262e-2ab5-4ee4-88a4-37d3dd68dc42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.622806] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773873, 'name': ReconfigVM_Task, 'duration_secs': 0.215725} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.628591] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1452.631095] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773876, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.631524] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.631524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5228166a-0026-b8f5-c66b-bf2c4e135655" [ 1452.631524] env[63371]: _type = "Task" [ 1452.631524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.645961] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5228166a-0026-b8f5-c66b-bf2c4e135655, 'name': SearchDatastore_Task, 'duration_secs': 0.015674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.645961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.646372] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1452.646853] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-932a02c0-e597-497f-9d0a-2bad813b30a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.657330] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.657330] env[63371]: value = "task-1773878" [ 1452.657330] env[63371]: _type = "Task" [ 1452.657330] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.673298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.676729] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.676729] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.719069] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1452.719405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1452.719705] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleting the datastore file [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1452.721020] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcb0c80d-f6a8-4296-952a-eba24d542ca9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.730256] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1452.730256] env[63371]: value = "task-1773879" [ 1452.730256] env[63371]: _type = "Task" [ 1452.730256] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.744579] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.822469] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Releasing lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.907107] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task, 'duration_secs': 0.629293} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.907107] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.907107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.907232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.907762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.908120] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be87eea3-b7ad-45c8-a6b1-dd69a4518b4a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.915837] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1452.915837] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9" [ 1452.915837] env[63371]: _type = "Task" [ 1452.915837] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.928660] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Successfully created port: 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1452.937664] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.948333] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.979774] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1453.087794] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.106457] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.109578] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1453.110886] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359befaa-f172-40b1-a9c4-63091c4b73af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.153995] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1453.158989] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f05bf0d-008e-4743-b6e8-8abd290d0407 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.200757] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.209523] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1453.209523] env[63371]: value = "task-1773880" [ 1453.209523] env[63371]: _type = "Task" [ 1453.209523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.215945] env[63371]: DEBUG nova.objects.instance [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lazy-loading 'flavor' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.224385] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.242320] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.428823] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9, 'name': SearchDatastore_Task, 'duration_secs': 0.036417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.433054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.433437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1453.433786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.434043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.434370] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1453.434998] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4dbf7474-5041-4f61-ac62-0d973a311153 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.441175] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.459032] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1453.459435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1453.463021] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bda0830-d0e7-4e22-80be-2b6af5f6cd92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.476382] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1453.476382] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067" [ 1453.476382] env[63371]: _type = "Task" [ 1453.476382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.498567] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.592598] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.699692] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.038336} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.700642] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1453.700642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1453.700815] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46b2aafb-f061-4b85-9791-ab3c7f9306e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.717185] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1453.717185] env[63371]: value = "task-1773881" [ 1453.717185] env[63371]: _type = "Task" [ 1453.717185] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.725504] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.733645] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773881, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.748738] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.619992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.749011] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1453.749209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1453.749385] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1453.749553] env[63371]: INFO nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1453.749797] env[63371]: DEBUG oslo.service.loopingcall [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.749986] env[63371]: DEBUG nova.compute.manager [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1453.750097] env[63371]: DEBUG nova.network.neutron [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1453.757861] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2118ecb-75a7-483a-b371-94118303dbdf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.767126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1745e5-898c-4e6c-9864-d3fdcdd95b98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.800437] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e2429-2a0a-47f1-a332-8a10cc136404 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.810400] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4b7431-dd02-49df-9b47-00eabbca6086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.828076] env[63371]: DEBUG nova.compute.provider_tree [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.934374] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.990662] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067, 'name': SearchDatastore_Task, 'duration_secs': 0.063338} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.991704] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e9deab-f223-4229-9896-34d9a37825d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.000780] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1454.003036] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1454.003036] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3" [ 1454.003036] env[63371]: _type = "Task" [ 1454.003036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.023009] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.046612] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1454.047485] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1454.047485] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.049135] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1454.049379] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.049553] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 
tempest-InstanceActionsTestJSON-1503006778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1454.049798] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1454.049977] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1454.050193] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1454.050376] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1454.050610] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1454.051686] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21506b4-2ef8-4dc7-8942-16cd3e50a784 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.061019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4e2c6b-3035-487c-a63d-154fc012c1d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.095329] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.227874] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task, 'duration_secs': 0.914051} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.228780] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1454.229546] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-632880ea-06c8-4825-b21a-890b300ddf51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.235636] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117482} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.236804] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.237270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.432s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.242025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e331674e-31c1-484f-a8eb-ccac503185e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.243986] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1454.243986] env[63371]: value = "task-1773882" [ 1454.243986] env[63371]: _type = "Task" [ 1454.243986] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.264218] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.265971] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04f70dd6-e984-4537-b8e0-df0b0b4a4db6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.285368] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773882, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.293651] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1454.293651] env[63371]: value = "task-1773883" [ 1454.293651] env[63371]: _type = "Task" [ 1454.293651] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.306837] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.331888] env[63371]: DEBUG nova.scheduler.client.report [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1454.433833] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.515786] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3, 'name': SearchDatastore_Task, 'duration_secs': 0.020914} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.516105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.516375] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1454.516689] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d68d3396-ac3d-49ba-9c1f-656ed420804d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.527824] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1454.527824] env[63371]: value = "task-1773884" [ 1454.527824] env[63371]: _type = "Task" [ 1454.527824] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.544717] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.591555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.591773] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.591911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.592100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.592377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.597961] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task, 'duration_secs': 1.548448} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.598542] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Terminating instance [ 1454.600276] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.600509] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 12.52 seconds to spawn the instance on the hypervisor. [ 1454.600691] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.605525] env[63371]: DEBUG nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1454.607080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1454.607080] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331507ca-42f5-4dce-bc4c-54ff76a101a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.613742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa1fc4f6-3177-4159-9906-965a9808a45e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.628669] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1454.628669] env[63371]: value = "task-1773885" [ 1454.628669] env[63371]: _type = "Task" [ 1454.628669] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.756986] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773882, 'name': Rename_Task, 'duration_secs': 0.356163} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.757386] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1454.757676] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f53f4ac8-3cc9-420f-8d27-ff924b03cfc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.766708] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1454.766708] env[63371]: value = "task-1773887" [ 1454.766708] env[63371]: _type = "Task" [ 1454.766708] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.780440] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.812312] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.842085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.879s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.842446] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1454.845701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.969s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.846049] env[63371]: DEBUG nova.objects.instance [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lazy-loading 'resources' on Instance uuid 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1454.934732] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.041268] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.142701] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 51.67 seconds to build instance. [ 1455.147970] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773885, 'name': PowerOffVM_Task, 'duration_secs': 0.301756} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1455.150097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d14f97b-ee6d-4887-a9b1-ee04230f29aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.177756] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d45058-c151-4f2f-8fc3-876ed9ce581d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.190529] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a11b3b-cd5a-4824-9f58-905bc4bf0bcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.219176] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0237dd90-0ef6-4c61-9e29-c737e658e4e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.242806] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] The volume has not been displaced from its original location: [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1455.248380] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfiguring VM instance instance-0000001a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1455.248843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec68650d-2746-4fd7-9d8e-629ef5eb9a32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.273195] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1455.273195] env[63371]: value = "task-1773888" [ 1455.273195] env[63371]: _type = "Task" [ 1455.273195] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.288869] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.293270] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.307684] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task, 'duration_secs': 0.936746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.311278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.311278] env[63371]: DEBUG nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1455.311278] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.311686] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.311686] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.311897] env[63371]: DEBUG nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] No waiting events found dispatching network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1455.312121] env[63371]: WARNING nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received unexpected event network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa for instance with vm_state building and task_state spawning. 
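Editorial note on the task records above: the repeated "_poll_task ... progress is N%" lines (api.py:434), the "completed successfully" lines (api.py:444), and the "Waiting for the task: (returnval){ ... } to complete" lines (api.py:397) all come from oslo.vmware's task-waiting loop. The driver invokes a vSphere task (ReconfigVM_Task, PowerOnVM_Task, CloneVM_Task, CopyVirtualDisk_Task, ...) and then blocks while the session polls the task object until it reaches a terminal state. The sketch below is a minimal illustration of that pattern only: get_task_info is a hypothetical stand-in for the property-collector read the real library performs, and the plain while-loop is a simplification of oslo.vmware's FixedIntervalLoopingCall-based implementation, not its actual code.

import time

def wait_for_task(session, task, get_task_info, poll_interval=0.5):
    """Poll a vSphere task until it finishes (illustrative sketch only).

    get_task_info is a hypothetical helper that would read the task's
    'info' property; oslo.vmware does the equivalent via the property
    collector inside a FixedIntervalLoopingCall, with poll_interval
    taken from the configurable task_poll_interval option.
    """
    while True:
        info = get_task_info(session, task)   # -> object with .state, .progress, .error
        if info.state == 'running':
            # Corresponds to the "progress is N%" DEBUG records in the log.
            print(f"Task {task}: {info.progress or 0}% complete")
        elif info.state == 'success':
            # Corresponds to the "... completed successfully" records.
            return info
        elif info.state == 'error':
            raise RuntimeError(f"Task {task} failed: {info.error}")
        time.sleep(poll_interval)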
[ 1455.312549] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf59f56b-6dd0-4f6c-a1be-bf1f920f2e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.324137] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1455.324137] env[63371]: value = "task-1773889" [ 1455.324137] env[63371]: _type = "Task" [ 1455.324137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.331888] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773889, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.353026] env[63371]: DEBUG nova.compute.utils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.357757] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1455.358244] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1455.435469] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.492887] env[63371]: DEBUG nova.policy [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1455.545110] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.838766} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.547862] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1455.548636] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1455.549197] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af2baa94-702b-43ee-9d33-260183bacd93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.558353] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1455.558353] env[63371]: value = "task-1773890" [ 1455.558353] env[63371]: _type = "Task" [ 1455.558353] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.572617] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773890, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.621748] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Successfully updated port: 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.645652] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.984s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.784384] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.789605] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773888, 'name': ReconfigVM_Task, 'duration_secs': 0.489789} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.792612] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfigured VM instance instance-0000001a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1455.795640] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c523953f-5114-4ff1-8d3f-36e327d98b85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.816576] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1455.816576] env[63371]: value = "task-1773891" [ 1455.816576] env[63371]: _type = "Task" [ 1455.816576] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.826021] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773891, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.841123] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773889, 'name': Rename_Task, 'duration_secs': 0.421885} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.841494] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.841757] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bdbcb7e-56f7-4a87-8b1c-7f16381458c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.852348] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1455.852348] env[63371]: value = "task-1773892" [ 1455.852348] env[63371]: _type = "Task" [ 1455.852348] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.859118] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1455.871375] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.941501] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task, 'duration_secs': 3.321967} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.941501] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created linked-clone VM from snapshot [ 1455.941501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441320a7-b320-4b4a-bc61-90fdab267dd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.952011] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploading image d6a027d0-1605-4385-9e91-38b4326d06e7 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1455.986995] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1455.986995] env[63371]: value = "vm-368311" [ 1455.986995] env[63371]: _type = "VirtualMachine" [ 1455.986995] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1455.987336] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5e5e3b16-5722-4946-9204-489397ea8335 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.999403] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1455.999403] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1455.999403] env[63371]: _type = "HttpNfcLease" [ 1455.999403] env[63371]: } obtained for exporting VM: (result){ [ 1455.999403] env[63371]: value = "vm-368311" [ 1455.999403] env[63371]: _type = "VirtualMachine" [ 1455.999403] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1455.999722] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1455.999722] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1455.999722] env[63371]: _type = "HttpNfcLease" [ 1455.999722] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1456.005053] env[63371]: DEBUG nova.network.neutron [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.008486] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.008486] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.008486] env[63371]: _type = "HttpNfcLease" [ 1456.008486] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1456.037525] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac614c17-a719-4628-8643-cde3b485ffe2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.046556] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeda3b9-0e91-4795-89b0-c8da7815a609 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.085025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9318c52c-98c1-4137-9975-7e8e04bba280 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.096328] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5d2313-6405-42d3-8c0f-53aa2872ab73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.102079] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088154} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.102079] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1456.102079] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a65c427-e1f2-408b-8036-e8a98ce4e4bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.113181] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1456.128059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.128059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 
tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.128059] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1456.139625] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1456.141893] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04ffa1ec-6b2c-4043-b3b2-a82636af185b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.156272] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1456.165828] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Successfully created port: 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1456.169288] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1456.169288] env[63371]: value = "task-1773894" [ 1456.169288] env[63371]: _type = "Task" [ 1456.169288] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.181011] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773894, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.281029] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task, 'duration_secs': 1.301992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.281401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.281546] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 11.41 seconds to spawn the instance on the hypervisor. [ 1456.281728] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.282589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f383b5b-4f85-49fb-bf07-60987a6758ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.327012] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773891, 'name': ReconfigVM_Task, 'duration_secs': 0.206} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.327347] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1456.327634] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1456.328462] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e52de6c-c076-42d7-958c-81e08e56d95d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.336594] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] 
Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1456.336838] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a5fbd98-30cb-44c8-ac17-42164d155587 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.371526] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.505087] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1456.505416] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1456.505517] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleting the datastore file [datastore1] e00c2e45-b8bc-440b-8b58-a21f127192c7 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1456.505816] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a224b713-800e-4b0a-88c6-dfbc9a2ba9ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.510299] env[63371]: INFO nova.compute.manager [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 2.76 seconds to deallocate network for instance. [ 1456.514020] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.514020] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.514020] env[63371]: _type = "HttpNfcLease" [ 1456.514020] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1456.518981] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1456.518981] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.518981] env[63371]: _type = "HttpNfcLease" [ 1456.518981] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1456.518981] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1456.518981] env[63371]: value = "task-1773896" [ 1456.518981] env[63371]: _type = "Task" [ 1456.518981] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.520104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e70c977-9101-4f5c-a277-5e529b08c0b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.532133] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.535173] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1456.535395] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1456.638857] env[63371]: ERROR nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [req-3e64947c-9d5b-4ca7-8e2e-9b82f9ffcabf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3e64947c-9d5b-4ca7-8e2e-9b82f9ffcabf"}]} [ 1456.648211] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ee64bd4c-72c8-4fd9-b0f7-9021c705b398 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.666472] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1456.683680] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773894, 'name': ReconfigVM_Task, 'duration_secs': 0.371285} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.687620] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1456.689668] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1456.689993] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1456.694235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.694235] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81ec9fdd-9a41-4488-857f-3de55a012a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.702876] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.708117] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1456.708117] env[63371]: value = "task-1773897" [ 1456.708117] env[63371]: _type = "Task" [ 1456.708117] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.708663] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1456.722208] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773897, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.752826] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1456.806823] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 49.21 seconds to build instance. [ 1456.872248] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task, 'duration_secs': 0.882056} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.873279] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.873279] env[63371]: INFO nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 5.60 seconds to spawn the instance on the hypervisor. [ 1456.873279] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.874887] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1456.877317] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c6d6db-5c98-422c-b564-287b6fe54777 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.914915] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1456.915164] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1456.915286] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.915465] env[63371]: DEBUG nova.virt.hardware [None 
req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1456.915609] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.915786] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1456.915948] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1456.916117] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1456.916283] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1456.916445] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1456.916616] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1456.917931] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab03d0b-40e2-4ce1-9714-37e9553cf524 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.935515] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41683115-cf46-417e-a2eb-723d6e40a54a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.002738] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": 
"78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.020872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.036331] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129032} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.039364] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1457.039770] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1457.040053] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1457.040217] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 2.43 seconds to destroy the instance on the hypervisor. 
[ 1457.041108] env[63371]: DEBUG oslo.service.loopingcall [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.043298] env[63371]: DEBUG nova.compute.manager [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1457.043298] env[63371]: DEBUG nova.network.neutron [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1457.227095] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773897, 'name': Rename_Task, 'duration_secs': 0.176755} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.227240] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1457.227632] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ee94aa2-5a2b-4d70-aea9-3e5582f416d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.236510] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1457.236510] env[63371]: value = "task-1773898" [ 1457.236510] env[63371]: _type = "Task" [ 1457.236510] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.246822] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.316531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.616s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.415553] env[63371]: INFO nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 36.39 seconds to build instance. 
[ 1457.421744] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-vif-deleted-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.421885] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-changed-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.422595] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Refreshing instance network info cache due to event network-changed-78d7a9b4-2512-4b55-95e3-50aa146658fa. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1457.422986] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.442944] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40720b46-c4dc-421e-a014-4619ad88f3d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.453519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42d736-9b76-481f-806d-1b8302aeafd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.487831] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33182f54-745f-4c66-800b-90b6e3841b91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.496914] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e4a5c3-9e27-49b1-9e47-cea71e61a7cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.505470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.505470] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance network_info: |[{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1457.513430] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.513723] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Refreshing network info cache for port 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1457.515301] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:74:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78d7a9b4-2512-4b55-95e3-50aa146658fa', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.523719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating folder: Project (1771acadeced40a6889b7dfb974e7886). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.524401] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.528672] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f54ebaf-0520-4281-87b1-238980664678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.542651] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created folder: Project (1771acadeced40a6889b7dfb974e7886) in parent group-v368199. [ 1457.542889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating folder: Instances. Parent ref: group-v368312. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.543159] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac4b3495-7687-481b-aecf-c57563f23e49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.555289] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created folder: Instances in parent group-v368312. [ 1457.555537] env[63371]: DEBUG oslo.service.loopingcall [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.556095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.556329] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a83e5ed5-cdaa-41f2-96ad-2c30067cb0b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.585031] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.585031] env[63371]: value = "task-1773901" [ 1457.585031] env[63371]: _type = "Task" [ 1457.585031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.594436] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.751023] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773898, 'name': PowerOnVM_Task, 'duration_secs': 0.500702} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.751023] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1457.751023] env[63371]: INFO nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 10.06 seconds to spawn the instance on the hypervisor. [ 1457.751023] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1457.751023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302a7a4c-5431-4a61-b787-05b31d476c8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.821331] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1457.927143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.499s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.032983] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1458.100803] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.281839] env[63371]: INFO nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 47.35 seconds to build instance. [ 1458.352753] env[63371]: DEBUG nova.compute.manager [None req-8f5b2cac-c04f-45bb-bb67-b29ebdf578e1 tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1458.354743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.357985] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6883fbe5-ea30-4b44-af0f-a3b0e1710a17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.364272] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updated VIF entry in instance network info cache for port 78d7a9b4-2512-4b55-95e3-50aa146658fa. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1458.364858] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.373451] env[63371]: INFO nova.compute.manager [None req-8f5b2cac-c04f-45bb-bb67-b29ebdf578e1 tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 
50d5eac1-0752-4089-948c-b04439df6f6c] Retrieving diagnostics [ 1458.374866] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af761f53-225d-4c51-9649-9810ce946e87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.424374] env[63371]: DEBUG nova.network.neutron [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.433647] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1458.488991] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Successfully updated port: 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1458.538741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.693s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.541319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.499s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.541642] env[63371]: DEBUG nova.objects.instance [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lazy-loading 'resources' on Instance uuid ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.573289] env[63371]: INFO nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted allocations for instance 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 [ 1458.597954] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task, 'duration_secs': 0.55993} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.598171] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.598906] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.599345] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.599595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.600010] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7643a2-fc21-4d05-8b28-e2e9e13320fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.619181] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1458.619181] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe" [ 1458.619181] env[63371]: _type = "Task" [ 1458.619181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.629151] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.727141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.727461] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.727688] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.727895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.728128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.731563] env[63371]: INFO nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Terminating instance [ 1458.733960] env[63371]: DEBUG nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.734178] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.735611] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c3b962-84e7-46bd-b7b0-ece472273cab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.745809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.746177] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e3ea91f-6569-440b-9195-60157b712712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.755181] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1458.755181] env[63371]: value = "task-1773902" [ 1458.755181] env[63371]: _type = "Task" [ 1458.755181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.765912] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.787063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.614s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.868950] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.931713] env[63371]: INFO nova.compute.manager [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 1.89 seconds to deallocate network for instance. 
[ 1458.966518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.996255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.996255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.996255] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.081584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.287s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.135351] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe, 'name': SearchDatastore_Task, 'duration_secs': 0.01413} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.135970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.135970] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.136151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.136299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.136475] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.136747] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-382bca50-85cf-4432-bdd8-fb935337f9f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.147988] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.148050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.149041] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a1549d-5a21-4dec-a0ed-92c67a5b10a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.158946] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.158946] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4" [ 1459.158946] env[63371]: _type = "Task" [ 1459.158946] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.168980] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.267558] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773902, 'name': PowerOffVM_Task, 'duration_secs': 0.270797} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.267928] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1459.268148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1459.268450] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58e84cfa-c002-435f-aadb-15880c9bb760 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.295008] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1459.364917] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.365190] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.365398] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleting the datastore file [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.368464] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca6003d2-b0c1-4e57-a8b4-5609dcd37989 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.379291] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1459.379291] env[63371]: value = "task-1773904" [ 1459.379291] env[63371]: _type = "Task" [ 1459.379291] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.400768] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.502892] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 0.57 seconds to detach 1 volumes for instance. [ 1459.505340] env[63371]: DEBUG nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleting volume: 1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8 {{(pid=63371) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1459.528411] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1459.673543] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4, 'name': SearchDatastore_Task, 'duration_secs': 0.015358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.678514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.679146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.679360] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.679454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.679687] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.682414] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-858c5561-eed0-49ff-8fb7-02751eca0909 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.684862] env[63371]: INFO nova.compute.manager [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Terminating instance [ 1459.688852] env[63371]: DEBUG nova.compute.manager [None 
req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.689899] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.690367] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ae3a2c-a9b8-4486-9fe5-03808248c9af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.699126] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.699126] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528e3763-75dc-ad00-3aed-b5dc551c04dd" [ 1459.699126] env[63371]: _type = "Task" [ 1459.699126] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.701713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5baf98cc-9fbd-4d7f-8454-b1ab402bc350 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.713600] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.716238] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c455da5-2a6e-4937-b59e-aa1569bb4267 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.722062] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c07d27-c61a-491a-be29-f80acaea18c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.725813] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e3763-75dc-ad00-3aed-b5dc551c04dd, 'name': SearchDatastore_Task, 'duration_secs': 0.014844} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.727511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.727750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1459.729269] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-vif-deleted-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.729465] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.729645] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.729840] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.729996] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.730257] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] No waiting events found dispatching network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1459.730466] env[63371]: WARNING nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 
64fc862c-a755-4cac-997b-7a8328638269] Received unexpected event network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 for instance with vm_state building and task_state spawning. [ 1459.730660] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-changed-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.731087] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Refreshing instance network info cache due to event network-changed-59bb4dc3-13e6-4180-bec1-3a41954f8d62. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1459.731357] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquiring lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.732768] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-922964d0-c675-41bc-958c-c2c1f024f8df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.764851] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1459.764851] env[63371]: value = "task-1773906" [ 1459.764851] env[63371]: _type = "Task" [ 1459.764851] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.766122] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.768316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e83d8f-1177-426a-b739-294be4459f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.777214] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.777214] env[63371]: value = "task-1773907" [ 1459.777214] env[63371]: _type = "Task" [ 1459.777214] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.789248] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773906, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.793100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63ae73e-3f15-4a74-bf65-1182e37398f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.804760] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.805308] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.805567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.805810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.806066] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.806245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.819525] env[63371]: DEBUG nova.compute.provider_tree [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.822049] env[63371]: INFO nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Terminating instance [ 1459.823441] env[63371]: DEBUG nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.823551] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.826216] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d167fd0-871a-4d91-b578-d2308cb49a57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.834114] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.834466] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0e520b9-3ee7-4ef4-a6e3-c5c3185b0d05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.843604] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1459.843604] env[63371]: value = "task-1773908" [ 1459.843604] env[63371]: _type = "Task" [ 1459.843604] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.844758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.855594] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773908, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.889841] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324073} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.890138] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.890505] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.890720] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.890905] env[63371]: INFO nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1459.891185] env[63371]: DEBUG oslo.service.loopingcall [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.891575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.891796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.891986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.892184] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.892783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.893915] env[63371]: DEBUG nova.compute.manager [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.894023] env[63371]: DEBUG nova.network.neutron [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.895950] env[63371]: INFO nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Terminating instance [ 1459.901022] env[63371]: DEBUG nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.901383] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.902061] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68617bc9-2ea0-411c-97fa-92866dc20a85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.912441] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.912805] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f2e6b5d-6d22-4095-8869-387dc2a972ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.921896] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1459.921896] env[63371]: value = "task-1773909" [ 1459.921896] env[63371]: _type = "Task" [ 1459.921896] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.934408] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773909, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.058962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.272504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.272848] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance network_info: |[{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1460.273433] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquired lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.273618] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Refreshing network info cache for port 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.275368] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:06:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59bb4dc3-13e6-4180-bec1-3a41954f8d62', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.284504] env[63371]: DEBUG oslo.service.loopingcall [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.286499] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1460.290368] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31fa72e5-7eef-47ce-be4c-d1b356186d5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.314452] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773906, 'name': PowerOffVM_Task, 'duration_secs': 0.478686} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.316100] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.316375] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.316684] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-538ba89e-01cd-4c0e-a155-39c036198912 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.326297] env[63371]: DEBUG nova.scheduler.client.report [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.328885] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.333155] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.333155] env[63371]: value = "task-1773910" [ 1460.333155] env[63371]: _type = "Task" [ 1460.333155] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.348830] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.360540] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773908, 'name': PowerOffVM_Task, 'duration_secs': 0.373073} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.360882] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.361092] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.361401] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fb17282-a705-4a91-aab8-a3212bfb9c6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.441038] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773909, 'name': PowerOffVM_Task, 'duration_secs': 0.300417} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.442060] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.442060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9b0b689-82ac-4735-bec8-6c858b86c5be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.442961] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0e08a0b-ce4c-4912-99a8-6f8f29a334e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.454800] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1460.454800] env[63371]: value = "task-1773913" [ 1460.454800] env[63371]: _type = "Task" [ 1460.454800] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.471666] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773913, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.477179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.477179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.477508] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleting the datastore file [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.478046] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d9929fa-ba7f-4aec-808e-2b878db7284d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.491793] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1460.491793] env[63371]: value = "task-1773915" [ 1460.491793] env[63371]: _type = "Task" [ 1460.491793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.502999] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.729049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.729670] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.729670] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.731320] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e4390fc-5085-4dcf-9a19-9f993f410d36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.738154] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1460.738154] env[63371]: value = "task-1773916" [ 1460.738154] env[63371]: _type = "Task" [ 1460.738154] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.749565] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.804313] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713087} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.804313] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1460.804313] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1460.804313] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da8862aa-a744-4e56-bc56-1c949e2222e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.813645] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1460.813645] env[63371]: value = "task-1773917" [ 1460.813645] env[63371]: _type = "Task" [ 1460.813645] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.825489] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.831015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.833761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.259s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.835113] env[63371]: INFO nova.compute.claims [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.856618] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.867660] env[63371]: INFO nova.scheduler.client.report [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleted allocations for instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 [ 1460.967634] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35978} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.968126] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1460.968435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1460.968834] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1460.969159] env[63371]: INFO nova.compute.manager [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1460.969541] env[63371]: DEBUG oslo.service.loopingcall [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.970187] env[63371]: DEBUG nova.compute.manager [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1460.970408] env[63371]: DEBUG nova.network.neutron [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.006023] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378601} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.006023] env[63371]: INFO nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1461.006340] env[63371]: DEBUG oslo.service.loopingcall [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.006340] env[63371]: DEBUG nova.compute.manager [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1461.006340] env[63371]: DEBUG nova.network.neutron [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.250579] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326306} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.251065] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.251586] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.251920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.252221] env[63371]: INFO nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1461.252596] env[63371]: DEBUG oslo.service.loopingcall [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.253100] env[63371]: DEBUG nova.compute.manager [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1461.255026] env[63371]: DEBUG nova.network.neutron [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.255562] env[63371]: DEBUG nova.network.neutron [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.325877] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103104} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.329952] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.329952] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b75c990-e64e-4e1c-9b3c-4d9b652ad5af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.360827] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1461.361909] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updated VIF entry in instance network info cache for port 59bb4dc3-13e6-4180-bec1-3a41954f8d62. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.362261] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.367686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9523dd9c-d9c2-4268-ad29-c39940035f87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.385079] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 
req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Releasing lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.388151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.097s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.395612] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task, 'duration_secs': 0.62755} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.399579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.399579] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1461.399579] env[63371]: value = "task-1773918" [ 1461.399579] env[63371]: _type = "Task" [ 1461.399579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.399579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.399579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.399871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1461.400143] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91659464-12a2-4817-a91e-9c90fa31f81f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.411154] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1461.411154] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e" [ 1461.411154] env[63371]: _type = "Task" [ 1461.411154] env[63371]: 
} to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.416274] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.429280] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.758861] env[63371]: INFO nova.compute.manager [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 1.86 seconds to deallocate network for instance. [ 1461.915331] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.927199] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e, 'name': SearchDatastore_Task, 'duration_secs': 0.018157} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.927518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.927718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.928028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.928211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.928409] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.931024] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7aa62ffc-18fc-421b-9b5d-a33b70b67bdc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.940579] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.940817] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.942839] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55788b4a-4435-4462-beb7-a705a3368ce2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.948930] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1461.948930] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2" [ 1461.948930] env[63371]: _type = "Task" [ 1461.948930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.961762] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.089464] env[63371]: DEBUG nova.network.neutron [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.118783] env[63371]: DEBUG nova.network.neutron [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-vif-deleted-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-vif-deleted-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-vif-deleted-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133537] env[63371]: INFO nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Neutron deleted interface d1b325d0-b864-44be-8fe4-b923489752d0; detaching it from the instance and deleting it from the info cache [ 1462.133537] env[63371]: DEBUG nova.network.neutron [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1462.145213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.145604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.148342] env[63371]: DEBUG nova.objects.instance [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.269574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.414640] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.433386] env[63371]: DEBUG nova.network.neutron [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.448033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717f477d-60ba-427c-aac7-68ce83f96faa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.469751] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c871d79-95d9-4a29-b021-0ae49cbed0db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.483911] env[63371]: DEBUG nova.compute.manager [req-2573811d-85ba-4b61-a02b-53dffc37b08c req-d85f0f1d-b91f-45bd-bc11-2d7326d6158d service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-vif-deleted-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.484545] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2, 'name': SearchDatastore_Task, 'duration_secs': 0.032602} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.486993] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67db17e0-b3d9-43d5-86c8-1104fd414982 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.523273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f0c023-52bc-4a98-a4ac-196a23c33b0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.529594] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1462.529594] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49" [ 1462.529594] env[63371]: _type = "Task" [ 1462.529594] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.541951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0d9a34-ebcc-45ad-9977-5bc820ff242a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.549715] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.564750] env[63371]: DEBUG nova.compute.provider_tree [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.592649] env[63371]: INFO nova.compute.manager [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 1.62 seconds to deallocate network for instance. [ 1462.629626] env[63371]: INFO nova.compute.manager [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 1.62 seconds to deallocate network for instance. [ 1462.641187] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddc4cc49-b32e-47d1-9e37-c5b7de59e22e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.656599] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e4db20-c8e1-4b99-a2d1-796a8291912c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.698195] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Detach interface failed, port_id=d1b325d0-b864-44be-8fe4-b923489752d0, reason: Instance 201a2d1e-9e2c-4c07-92be-200408874ad4 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1462.779373] env[63371]: DEBUG nova.objects.instance [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.912504] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.936028] env[63371]: INFO nova.compute.manager [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 1.68 seconds to deallocate network for instance. [ 1463.046531] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49, 'name': SearchDatastore_Task, 'duration_secs': 0.028692} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.046822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.047488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.047488] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f73348a8-497b-451b-8ac5-8d8ec69e71ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.051972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.052207] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.057627] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1463.057627] env[63371]: value = "task-1773919" [ 1463.057627] env[63371]: _type = "Task" [ 1463.057627] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.068025] env[63371]: DEBUG nova.scheduler.client.report [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1463.071204] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.103038] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.136567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.283885] env[63371]: DEBUG nova.objects.base [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance<7e463dd7-84a6-4e6d-ae8f-0860e3a20f05> lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1463.283885] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.333712] env[63371]: DEBUG nova.policy [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1463.412722] 
env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task, 'duration_secs': 1.634441} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.412896] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Reconfigured VM instance instance-00000028 to attach disk [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.413572] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e5dcf02-8287-4635-bfdd-e85aabbf9b80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.421325] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1463.421325] env[63371]: value = "task-1773920" [ 1463.421325] env[63371]: _type = "Task" [ 1463.421325] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.430459] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.442835] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.557852] env[63371]: DEBUG nova.compute.utils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1463.568927] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.573163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.573693] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1463.576485] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.042s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.576718] env[63371]: DEBUG nova.objects.instance [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lazy-loading 'resources' on Instance uuid 9249f27a-1985-4be1-947c-e433c7aa26f1 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1463.634766] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully created port: 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1463.935969] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.058972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.069776] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.080515] env[63371]: DEBUG nova.compute.utils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1464.084323] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1464.084497] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1464.125661] env[63371]: DEBUG nova.policy [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1464.399724] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Successfully created port: 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1464.435296] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.573939] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.587226] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1464.656587] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99347f98-cc95-4459-811b-8c696bbdc2ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.667138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c23bb3a-999a-4a36-ac47-48d2dddaff98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.705513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949d5b23-ffc5-4f22-9693-104f84ad8825 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.718764] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9aaa7c2-e81c-4171-a570-3534f6e840f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.737588] env[63371]: DEBUG nova.compute.provider_tree [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.939310] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.071854] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.211961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.212243] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.212478] env[63371]: INFO nova.compute.manager [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Attaching volume 5d39df22-c7dc-4c2a-8bed-1f0a74a568c4 to /dev/sdb [ 1465.241642] env[63371]: DEBUG nova.scheduler.client.report [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.252521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53aadf3-bbb0-4b02-b0c5-5cd22da9d0c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.260595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc671b3-32a6-4a93-86ca-561ee4b580b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.276670] env[63371]: DEBUG nova.virt.block_device [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating existing volume attachment record: 746f8f05-8459-49a4-a68d-b54f26e685f9 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1465.325328] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully updated port: 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1465.436199] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e 
tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.574838] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.599240] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1465.627137] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1465.627361] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1465.627678] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1465.627894] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1465.628284] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.630301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c01a640-6354-4487-a049-b39a9cfa86a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.640706] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5aa865-3635-4700-8cad-de9511839bac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.747096] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.753394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.436s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.753394] env[63371]: INFO nova.compute.claims [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Claim successful 
on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1465.778650] env[63371]: INFO nova.scheduler.client.report [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleted allocations for instance 9249f27a-1985-4be1-947c-e433c7aa26f1 [ 1465.828703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.828911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.829069] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.939522] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.073777] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.92498} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.074107] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.074373] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.074776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dfd9421-982c-4062-aba2-d31e63353586 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.084542] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1466.084542] env[63371]: value = "task-1773924" [ 1466.084542] env[63371]: _type = "Task" [ 1466.084542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.097654] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773924, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.109050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.109503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.289015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.681s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.369661] env[63371]: WARNING nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1466.441927] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task, 'duration_secs': 2.532401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.443283] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Successfully updated port: 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1466.446100] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.446100] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01f31744-b120-4bd5-82e9-6ad9c61f4c0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.455190] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1466.455190] env[63371]: value = "task-1773925" [ 1466.455190] env[63371]: _type = "Task" [ 1466.455190] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.468880] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.533411] env[63371]: DEBUG nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.533411] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.533508] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.534362] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.534710] env[63371]: DEBUG nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] No waiting events found dispatching network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1466.534757] env[63371]: WARNING nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received unexpected event network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 for instance with vm_state active and task_state None. [ 1466.596476] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085998} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.596676] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1466.597795] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8fba7b-1b15-4e5d-b05b-f3d5cbb709fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.622278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.622717] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-271c206f-ad4d-485c-87ca-53453d9884fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.650267] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1466.650267] env[63371]: value = "task-1773926" [ 1466.650267] env[63371]: _type = "Task" [ 1466.650267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.662239] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773926, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.903623] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "address": "fa:16:3e:4e:95:62", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e143eba-fc", "ovs_interfaceid": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.946409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.946409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock 
"refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.946771] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1466.972835] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.018924] env[63371]: DEBUG nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.021784] env[63371]: DEBUG nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] No waiting events found dispatching network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1467.021784] env[63371]: WARNING nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received unexpected event network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a for instance with vm_state building and task_state spawning. [ 1467.165170] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773926, 'name': ReconfigVM_Task, 'duration_secs': 0.304594} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.168535] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.168768] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4708f736-6e42-48f8-92f1-f278fa5e59e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.178203] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1467.178203] env[63371]: value = "task-1773927" [ 1467.178203] env[63371]: _type = "Task" [ 1467.178203] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.190942] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773927, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.329573] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169b54f3-5f1e-4e80-87a5-7452ef56a7e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.338936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41057678-eb45-408e-ab33-25b7cf062d45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.371053] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97809567-e4a0-4275-9555-c470706f0e8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.379991] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c709e9bd-b39a-455c-b0ac-f2b930a6b269 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.394476] env[63371]: DEBUG nova.compute.provider_tree [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.407279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.407937] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.408113] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.409226] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e17611-674b-42c4-852e-15eb41ccb579 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.427839] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1467.427839] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.427966] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1467.436745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1467.436745] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73735527-1f9a-4f70-b4f4-de5663feea03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.454721] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1467.454721] env[63371]: value = "task-1773928" [ 1467.454721] env[63371]: _type = "Task" [ 1467.454721] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.468882] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task, 'duration_secs': 0.636102} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.471929] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.472158] env[63371]: INFO nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 13.47 seconds to spawn the instance on the hypervisor. [ 1467.472344] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.472678] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.473389] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b1a94e-5ee9-44f1-a28e-5b74e613c3f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.484208] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1467.691295] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773927, 'name': Rename_Task, 'duration_secs': 0.165582} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.691295] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1467.691295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5658bbd7-91ec-434e-899c-e3ea2aeba12f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.700295] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1467.700295] env[63371]: value = "task-1773929" [ 1467.700295] env[63371]: _type = "Task" [ 1467.700295] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.700295] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.711435] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.898290] env[63371]: DEBUG nova.scheduler.client.report [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1467.967629] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.994595] env[63371]: INFO nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 43.32 seconds to build instance. 
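(Editor's note, illustrative only.) The bulk of the DEBUG traffic above is oslo.vmware task polling: "Waiting for the task: (returnval){ value = task-… }", repeated "progress is N%." lines from _poll_task, and a final "completed successfully." once the vCenter task finishes. The sketch below is a minimal, hypothetical Python rendering of that polling shape, not Nova's or oslo.vmware's actual code; the callable get_task_info and its attributes are assumptions made for the example.

    # Minimal sketch of the poll loop that produces the "_poll_task" DEBUG lines above.
    # Assumption: get_task_info() returns an object with .state ('queued'/'running'/
    # 'success'/'error'), .progress (int or None), .result, and .error.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a long-running task until it succeeds or fails."""
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                # Mirrors: "Task: {'id': ..., 'name': ...} progress is N%."
                print(f"progress is {info.progress or 0}%")
            elif info.state == 'success':
                # Mirrors: "Task: {...} completed successfully."
                print("completed successfully.")
                return info.result
            else:
                raise RuntimeError(info.error)
            time.sleep(poll_interval)

In the real library the polling is driven on a fixed-interval timer from oslo.service (the loopingcall.py frames that appear in the entries below) rather than a bare sleep loop, but the observable behaviour in the log is the same: periodic progress lines followed by a completion line with duration_secs.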
[ 1468.203328] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.203328] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance network_info: |[{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1468.204405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:54:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9661bc17-8fdd-42bf-ae5d-bfa211e88e4a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.212097] env[63371]: DEBUG oslo.service.loopingcall [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.216806] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1468.217190] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b815da-a34b-471a-9d2b-1fda7eef1e3b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.240104] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.241603] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1468.241603] env[63371]: value = "task-1773931" [ 1468.241603] env[63371]: _type = "Task" [ 1468.241603] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.251348] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.403865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.404428] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1468.407930] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.793s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.408130] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.408313] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1468.408672] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.725s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.417162] env[63371]: INFO nova.compute.claims [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1468.418019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0c4c17-2ac4-49c7-9f14-e90ad7b16bfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.431623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747b7c7b-1116-49fc-bc31-0b138e00f717 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.450655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4893a758-1745-4e19-af40-6c96002d3789 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.471013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b636c8-4035-441e-8e25-b60fda24b5a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.482911] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.519583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.719s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.520892] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178801MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1468.520892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.718020] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task, 'duration_secs': 0.638703} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.718020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.718020] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 11.84 seconds to spawn the instance on the hypervisor. [ 1468.718020] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1468.718020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adf394b-8574-4095-9b17-38b987f531f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.752318] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.781530] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.782167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.919389] env[63371]: DEBUG nova.compute.utils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1468.920884] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1468.921071] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1468.968183] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task, 'duration_secs': 1.153003} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.969649] env[63371]: DEBUG nova.policy [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a57fe97ed9414622a09d1d59a9eff8bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a8f1cf54a6426b9980b16b17283b19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1468.971542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.971611] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1469.023210] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.237548] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 43.38 seconds to build instance. [ 1469.253861] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task, 'duration_secs': 0.529809} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.253861] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.254733] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.254894] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.255248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.256102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcc149d7-f7b3-4c4b-86f0-ac141ab26f98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.262269] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1469.262269] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449" [ 1469.262269] env[63371]: _type = "Task" [ 1469.262269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.271296] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.352213] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Successfully created port: 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1469.386115] env[63371]: DEBUG nova.compute.manager [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.386322] env[63371]: DEBUG nova.compute.manager [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-8e143eba-fc86-4474-91f7-a5785bb2dbe3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1469.386543] env[63371]: DEBUG oslo_concurrency.lockutils [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.386754] env[63371]: DEBUG oslo_concurrency.lockutils [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.386941] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.425357] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1469.479190] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.334s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.548720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.641504] env[63371]: DEBUG nova.compute.manager [None req-dc7c3fd3-2556-44a6-bb03-17cff56a52ac tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1469.643273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad434f9-026b-4a14-addc-1594661a5eab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.655881] env[63371]: INFO nova.compute.manager [None req-dc7c3fd3-2556-44a6-bb03-17cff56a52ac tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Retrieving diagnostics [ 1469.656763] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794f1bf3-3d4e-4086-a15c-a66d2083b832 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.740641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.774014] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449, 'name': SearchDatastore_Task, 'duration_secs': 0.018466} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.776594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.776862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1469.777109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.777257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.777456] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.777926] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb3fc4d-6fa1-4965-a5f6-660d9e7826c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.787861] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.788082] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1469.791287] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16672559-2c77-4a4f-b4c9-855c0f0916ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.797213] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1469.797213] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9" [ 1469.797213] env[63371]: _type = "Task" [ 1469.797213] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.804940] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.842669] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1469.842974] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1469.843849] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1735033-4318-4dba-b6f3-39e546b200c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.865441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f5362-c0c3-4dfe-be05-1c3633e99c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.890238] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4/volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4.vmdk or 
device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1469.894834] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-274f529b-f7c7-401b-aa0a-6ed56d19d26b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.913885] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1469.913885] env[63371]: value = "task-1773932" [ 1469.913885] env[63371]: _type = "Task" [ 1469.913885] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.925234] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.010728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa768438-c7e2-42c0-bdf3-1087a521e897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.019194] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e99babf-e178-4601-9735-297950ecb520 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.056858] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df5692c-8662-40c7-9fea-5d19a5c37b45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.067411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bb6ab2-907c-4c9a-87b3-abcccc913d78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.087558] env[63371]: DEBUG nova.compute.provider_tree [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.166254] env[63371]: DEBUG nova.compute.manager [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-changed-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1470.166431] env[63371]: DEBUG nova.compute.manager [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Refreshing instance network info cache due to event network-changed-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1470.167396] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Acquiring lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.167572] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Acquired lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.167747] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Refreshing network info cache for port 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1470.170270] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port 8e143eba-fc86-4474-91f7-a5785bb2dbe3. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.170718] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "address": "fa:16:3e:4e:95:62", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e143eba-fc", "ovs_interfaceid": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.243808] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1470.305539] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1470.306697] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562df63d-62c5-4f46-ac16-ec30029d0ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.314076] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1470.314263] env[63371]: ERROR oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk due to incomplete transfer. [ 1470.318486] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-73625c23-9296-4b00-aa40-7a41d96e078b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.319762] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9, 'name': SearchDatastore_Task, 'duration_secs': 0.037757} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.321047] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d4af369-ffb6-43c3-ba42-eebb9cc7ae29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.326675] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1470.326675] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b" [ 1470.326675] env[63371]: _type = "Task" [ 1470.326675] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.327979] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1470.328331] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploaded image d6a027d0-1605-4385-9e91-38b4326d06e7 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1470.331009] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1470.334992] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-199de9e0-8268-4f1e-b896-f225d8f6b0f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.341848] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.343342] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1470.343342] env[63371]: value = "task-1773933" [ 1470.343342] env[63371]: _type = "Task" [ 1470.343342] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.352490] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.423801] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.438267] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1470.460721] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1470.461007] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1470.461179] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.461364] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1470.461590] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.461743] 
env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1470.461950] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1470.462130] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1470.462302] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1470.462467] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1470.462657] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1470.463583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a36f0be-c3c1-42e6-9aee-30a97e077439 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.471997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70327044-7a68-4602-b8d3-36e040802dd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.591168] env[63371]: DEBUG nova.scheduler.client.report [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.673019] env[63371]: DEBUG oslo_concurrency.lockutils 
[req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.777209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.841222] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b, 'name': SearchDatastore_Task, 'duration_secs': 0.023555} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.841222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.841222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1470.841222] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9df852ad-b74d-4ac1-88a5-9654efcfddbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.848479] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1470.848479] env[63371]: value = "task-1773934" [ 1470.848479] env[63371]: _type = "Task" [ 1470.848479] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.854526] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.859394] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.926065] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task, 'duration_secs': 0.630982} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.929310] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4/volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1470.935016] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1890a8b-2963-4ea2-bbc6-68bcf131c8a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.953502] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1470.953502] env[63371]: value = "task-1773935" [ 1470.953502] env[63371]: _type = "Task" [ 1470.953502] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.962527] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773935, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.993878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.994261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.994439] env[63371]: INFO nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Rebooting instance [ 1471.009748] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updated VIF entry in instance network info cache for port 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1471.010158] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.026670] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Successfully updated port: 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1471.099153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.099504] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1471.102556] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.250s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.104143] env[63371]: INFO nova.compute.claims [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.357263] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task, 'duration_secs': 0.896785} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.360771] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroyed the VM [ 1471.361188] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1471.361506] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.362064] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f50fea1d-e46f-40dd-9c55-1df567096014 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.370684] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1471.370684] env[63371]: value = "task-1773936" [ 1471.370684] env[63371]: _type = "Task" [ 1471.370684] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.381519] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.391355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.391502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.391705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.392369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.392369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.394172] env[63371]: INFO nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Terminating instance [ 1471.395986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.396266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.396461] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.464461] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773935, 'name': ReconfigVM_Task, 'duration_secs': 0.162876} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.464763] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1471.512702] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Releasing lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.518802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.520688] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.520688] env[63371]: DEBUG nova.network.neutron [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.530232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.530232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.530232] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] No waiting events found dispatching network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1471.535377] env[63371]: WARNING nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received unexpected event network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 for instance with vm_state building and task_state spawning. [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-changed-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.537281] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Refreshing instance network info cache due to event network-changed-5f6d168b-1bd3-4bdd-9693-ee62c25e8666. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1471.537755] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquiring lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.610732] env[63371]: DEBUG nova.compute.utils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1471.615523] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1471.615523] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1471.714649] env[63371]: DEBUG nova.policy [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c1437e43364f0ba8db6677fe2ed978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3fa37041acf4211987c97c105c47cf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1471.859660] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699297} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.862608] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.862608] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.862608] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8275f6f-a245-41a7-a201-b2386cf6431e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.869039] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1471.869039] env[63371]: value = "task-1773937" [ 1471.869039] env[63371]: _type = "Task" [ 1471.869039] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.878949] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.882362] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773937, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.918756] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1471.981029] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.071911] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1472.115688] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1472.126454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.126703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.258372] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Successfully created port: 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.382013] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773937, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070028} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.382781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.383568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6ed915-937c-47bd-9e34-aa0e3d5b0321 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.390531] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 74%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.413144] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.416618] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cafb811-876e-44e4-93b8-e92c54438321 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.439485] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.450396] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1472.450396] env[63371]: value = "task-1773938" [ 1472.450396] env[63371]: _type = "Task" [ 1472.450396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.462800] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773938, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.484663] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.485079] env[63371]: DEBUG nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1472.485393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1472.486134] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dc79f6-a6a6-48c7-9ffd-e0b908f23110 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.494056] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1472.496842] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b44da852-b05d-4196-b45b-2b425a14bcd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.503229] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1472.503229] env[63371]: value = "task-1773939" [ 1472.503229] env[63371]: _type = "Task" [ 1472.503229] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.515318] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.516179] env[63371]: DEBUG nova.network.neutron [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.519420] env[63371]: DEBUG nova.objects.instance [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid 44cc8606-24f5-4f6b-b96f-3559c9c3f06e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.628950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.630008] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.632185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62c5d88-63ae-4db3-a487-a8e6c671b484 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.656087] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33567575-079c-47d0-b541-00d8e1e774a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.690694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 
7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1472.693921] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1802f9b1-49fd-4aea-8d19-76c6f1527e9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.713162] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1472.713162] env[63371]: value = "task-1773940" [ 1472.713162] env[63371]: _type = "Task" [ 1472.713162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.725056] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.883993] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task, 'duration_secs': 1.438082} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.888564] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1472.888564] env[63371]: INFO nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 23.48 seconds to snapshot the instance on the hypervisor. 
[ 1472.917056] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641a0280-6d08-4cab-9440-c560b59ac5fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.924963] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a4902b-40ae-458a-a413-386f5b86bcde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.958821] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.959176] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance network_info: |[{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1472.959794] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquired lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.960303] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Refreshing network info cache for port 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1472.961581] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:87:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'4c6a4836-66dc-4e43-982b-f8fcd3f9989a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f6d168b-1bd3-4bdd-9693-ee62c25e8666', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1472.970231] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating folder: Project (00a8f1cf54a6426b9980b16b17283b19). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1472.973844] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a70563-25c4-4f90-951c-0ef46584b7cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.979582] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b446c28-bce4-4fe9-8f01-15ab7d34e516 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.990739] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d47345-f44b-462d-84d0-edd264e10496 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.994871] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773938, 'name': ReconfigVM_Task, 'duration_secs': 0.395276} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.996129] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Reconfigured VM instance instance-0000002a to attach disk [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1472.996772] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created folder: Project (00a8f1cf54a6426b9980b16b17283b19) in parent group-v368199. [ 1472.996948] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating folder: Instances. Parent ref: group-v368319. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1472.997496] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce5b87d7-7212-4073-a33c-cd29c0cd1bfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.999434] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1744a7f-064a-419b-81aa-3cf4e846f6db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.013629] env[63371]: DEBUG nova.compute.provider_tree [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.017857] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1473.017857] env[63371]: value = "task-1773942" [ 1473.017857] env[63371]: _type = "Task" [ 1473.017857] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.021057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.022684] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773939, 'name': PowerOffVM_Task, 'duration_secs': 0.138875} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.024646] env[63371]: DEBUG nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1473.029334] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1473.029334] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1473.029563] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created folder: Instances in parent group-v368319. [ 1473.029772] env[63371]: DEBUG oslo.service.loopingcall [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.030754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080a4d45-b436-4ef2-a348-baa88a6ac05a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.033838] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.034763] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-708ab892-ff8b-4675-a89d-cbbb489c4e0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.036071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1473.037223] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-680ecf24-9d5c-4ca2-bfaf-161a365c3768 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.057279] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773942, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.064875] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1473.064875] env[63371]: value = "task-1773945" [ 1473.064875] env[63371]: _type = "Task" [ 1473.064875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.073926] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773945, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.082939] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1473.083165] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1473.083584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleting the datastore file [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.083584] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8b45aea-7c02-47ed-bb54-358ad22ae7fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.090613] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1473.090613] env[63371]: value = "task-1773946" [ 1473.090613] env[63371]: _type = "Task" [ 1473.090613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.099368] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.128582] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1473.160683] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1473.161228] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1473.161578] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.161925] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1473.162210] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.162487] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1473.162842] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1473.165044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a08c7b-6dba-4df1-a704-e8b205098e3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.175621] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a29f5fb-02da-426d-a52b-ec12dfd4b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.223914] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.518152] env[63371]: DEBUG nova.scheduler.client.report [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.530649] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773942, 'name': Rename_Task, 'duration_secs': 0.217738} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.530914] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1473.531175] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-709634b6-2204-4cd3-bb8b-32dcb535761d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.540940] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1473.540940] env[63371]: value = "task-1773947" [ 1473.540940] env[63371]: _type = "Task" [ 1473.540940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.549907] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.577320] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773945, 'name': CreateVM_Task, 'duration_secs': 0.364019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.577486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1473.578162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.578377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.578696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1473.579214] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835cc217-7c19-45de-83c5-644d13c9bfdc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.583638] 
env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1473.583638] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca" [ 1473.583638] env[63371]: _type = "Task" [ 1473.583638] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.584392] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updated VIF entry in instance network info cache for port 5f6d168b-1bd3-4bdd-9693-ee62c25e8666. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1473.584715] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.594972] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.603534] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166964} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.603772] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1473.603946] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1473.604128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1473.604291] env[63371]: INFO nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1473.604514] env[63371]: DEBUG oslo.service.loopingcall [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.604905] env[63371]: DEBUG nova.compute.manager [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1473.605046] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1473.638074] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1473.724620] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.026673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.924s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.027232] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1474.030512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.942s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.030746] env[63371]: DEBUG nova.objects.instance [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lazy-loading 'resources' on Instance uuid b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1474.053087] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.074162] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7b9864-c3de-4a71-a9a5-5ef254ff9917 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.081588] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Doing hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1474.082064] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1053944c-3bab-45fc-a566-b84ecfa3bed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.090370] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Releasing lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.090796] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1474.090796] env[63371]: value = "task-1773948" [ 1474.090796] env[63371]: _type = "Task" [ 1474.090796] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.097363] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca, 'name': SearchDatastore_Task, 'duration_secs': 0.04635} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.097985] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.098281] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1474.098571] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.098918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.099084] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.105954] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-545e1581-5bb0-4440-bf7e-c26dcaed1778 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.111025] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773948, 'name': ResetVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.118745] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.118993] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1474.119886] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c0d70c3-781f-4de0-8b0e-962244cdac1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.127464] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1474.127464] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f" [ 1474.127464] env[63371]: _type = "Task" [ 1474.127464] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.135134] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.140557] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.225569] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.465295] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Successfully updated port: 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.534773] env[63371]: DEBUG nova.compute.utils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.541889] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.541889] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.544822] env[63371]: DEBUG nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.545043] env[63371]: DEBUG nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] No waiting events found dispatching network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1474.545043] env[63371]: WARNING nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received unexpected event network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 for instance with vm_state building and task_state spawning. [ 1474.560023] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.608507] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773948, 'name': ResetVM_Task, 'duration_secs': 0.105612} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.608809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Did hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1474.609012] env[63371]: DEBUG nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1474.610272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2e3f78-8be5-4901-9e1b-9be07d6294eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.641348] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f, 'name': SearchDatastore_Task, 'duration_secs': 0.022302} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.645654] env[63371]: DEBUG nova.policy [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.647900] env[63371]: INFO nova.compute.manager [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 1.04 seconds to deallocate network for instance. [ 1474.648630] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ced9b72-9928-4036-b04e-7e0e53afbe74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.660025] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1474.660025] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab" [ 1474.660025] env[63371]: _type = "Task" [ 1474.660025] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.677421] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.730253] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.853439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.853672] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.971193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.971465] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.971536] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.046597] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1475.060029] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.084411] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Successfully created port: 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.127802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.134s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.150264] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59398f8d-35b4-4403-93f7-40b12e15e51d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.158397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.159665] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f84814-2073-43ee-9cf9-9484ab7b45fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.171984] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab, 'name': SearchDatastore_Task, 'duration_secs': 0.021516} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.198322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.198596] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1475.200525] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c875a56-47e3-4df5-aa0a-0ff8b6d19e08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.203082] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc134cd1-de44-49a3-b915-c2c78b1e28b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.216372] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ba841b-b499-46a8-8b9c-64b752e7ef2e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.221454] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1475.221454] env[63371]: value = "task-1773949" [ 1475.221454] env[63371]: _type = "Task" [ 1475.221454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.236393] env[63371]: DEBUG nova.compute.provider_tree [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.243228] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.246192] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.509886] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.573818] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task, 'duration_secs': 1.94103} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.574157] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.574765] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 9.98 seconds to spawn the instance on the hypervisor. [ 1475.574765] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1475.575939] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d00593-1fca-433e-af0a-6f4d4f755b2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1475.710493] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.710825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.715660] env[63371]: INFO nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Terminating instance [ 1475.718083] env[63371]: DEBUG nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1475.718297] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1475.719640] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df179a4a-c9e5-4ccb-89fc-7ef9880f88da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.739238] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.749241] env[63371]: DEBUG nova.scheduler.client.report [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.753513] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.753816] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1475.755324] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.756636] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f2d9569-1cc0-4e27-be57-37aeaaa0fef2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.764719] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 
tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1475.764719] env[63371]: value = "task-1773950" [ 1475.764719] env[63371]: _type = "Task" [ 1475.764719] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.778167] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.066924] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1476.094609] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1476.094899] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1476.095010] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.095405] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1476.096822] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1476.097399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea740d8f-ab91-420f-ae37-c2a84676d251 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.102719] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 44.55 seconds to build instance. [ 1476.109486] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a137bdd-6cfb-4cb2-8749-ae4141e919e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.236408] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678029} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.239338] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1476.239585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1476.239889] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.240083] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f781568-9394-4a2c-ac85-b0840d525830 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.247174] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1476.247174] env[63371]: value = "task-1773951" [ 1476.247174] env[63371]: _type = "Task" [ 1476.247174] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.256479] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773951, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.257296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.259451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.567s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.261025] env[63371]: INFO nova.compute.claims [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.264018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.264185] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance network_info: |[{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1476.264778] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:30:e4', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca99f7a1-6365-4d3c-af16-1b1c1288091e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96760ebc-7de4-48e4-94ac-f0a3a2eab943', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.275207] env[63371]: DEBUG oslo.service.loopingcall [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.275207] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.277745] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-443f7662-2562-42fa-8ffa-9682c24d6713 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.296709] env[63371]: INFO nova.scheduler.client.report [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted allocations for instance b48a8e83-e581-4886-833b-bbce155d40d9 [ 1476.302922] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773950, 'name': PowerOffVM_Task, 'duration_secs': 0.391757} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.304485] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1476.304659] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.304912] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.304912] env[63371]: value = "task-1773952" [ 1476.304912] env[63371]: _type = "Task" [ 1476.304912] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.305665] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea692d67-af80-48ad-ada5-0336f993d10a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.316571] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773952, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.421495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1476.421789] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1476.421996] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleting the datastore file [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1476.422362] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9719a79f-399f-40e2-90ef-58bf71a84bc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.431037] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1476.431037] env[63371]: value = "task-1773954" [ 1476.431037] env[63371]: _type = "Task" [ 1476.431037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.440035] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.606771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.568s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.723141] env[63371]: DEBUG nova.compute.manager [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.723348] env[63371]: DEBUG nova.compute.manager [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1476.723562] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.723702] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.725658] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.743415] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.757800] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121976} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.758513] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1476.762019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bbd581-eee3-4438-a5ad-302d5815c45e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.790427] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1476.795316] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff3320c8-6208-4a3b-85e0-cf8b6ce16e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.819669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.314s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.829766] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773952, 'name': CreateVM_Task, 'duration_secs': 0.42088} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.830968] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1476.831314] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1476.831314] env[63371]: value = "task-1773955" [ 1476.831314] env[63371]: _type = "Task" [ 1476.831314] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.831970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.832222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.832478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1476.832892] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f9b217-8e03-411e-b48b-d97d3e286dc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.843224] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1476.843224] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb" [ 1476.843224] env[63371]: _type = "Task" [ 1476.843224] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.847196] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773955, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.856996] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.916344] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Successfully updated port: 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.947833] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460083} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.948348] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.948712] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1476.948712] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.948865] env[63371]: INFO nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1476.949148] env[63371]: DEBUG oslo.service.loopingcall [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.949338] env[63371]: DEBUG nova.compute.manager [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1476.949443] env[63371]: DEBUG nova.network.neutron [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1477.109367] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1477.249551] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.347786] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773955, 'name': ReconfigVM_Task, 'duration_secs': 0.284878} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.355113] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Reconfigured VM instance instance-0000002b to attach disk [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1477.356275] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e66b5194-6adf-49e5-b6e8-ab9c6ce5ba5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.364180] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb, 'name': SearchDatastore_Task, 'duration_secs': 0.020969} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.365513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.365761] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.366018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.366180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.366365] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.366682] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1477.366682] env[63371]: value = "task-1773956" [ 1477.366682] env[63371]: _type = "Task" [ 1477.366682] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.366865] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90dc3e56-2934-4124-a9ab-f423d7dc254f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.381080] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773956, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.382140] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.382326] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1477.383088] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132d0b24-e6c6-4e88-beb0-b0b308b2ec23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.388613] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1477.388613] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356" [ 1477.388613] env[63371]: _type = "Task" [ 1477.388613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.399514] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.421216] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3994fb0a-c787-4436-96c6-6274a7861b94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.425713] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.425713] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.425713] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.430598] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f83ba5-3af2-40c7-9050-699424022ba4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.462316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.462425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.462574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.462760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.462924] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.467569] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6ffcaa-9541-4603-a04b-d2580b408bc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.470563] env[63371]: INFO nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Terminating instance [ 1477.472727] env[63371]: DEBUG nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1477.472920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.474442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6f3b70-373a-4ea7-a678-844c06f51345 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.483129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cc31cc-808d-4167-bc36-4d461f19be92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.489938] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.490351] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42347a9c-e88c-4537-8147-65cb950b9cb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.500338] env[63371]: DEBUG nova.compute.provider_tree [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.507310] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] 
Waiting for the task: (returnval){ [ 1477.507310] env[63371]: value = "task-1773957" [ 1477.507310] env[63371]: _type = "Task" [ 1477.507310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.517510] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.555451] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updated VIF entry in instance network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.555722] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.628077] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.698489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.698669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 
tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.698888] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.699092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.699266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.701418] env[63371]: INFO nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Terminating instance [ 1477.703197] env[63371]: DEBUG nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1477.703364] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.704236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8585fa61-0bc2-40b3-8c78-426e642e552b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.712558] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.712776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1fe4fea-cdef-4105-96fb-1667added149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.719186] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1477.719186] env[63371]: value = "task-1773958" [ 1477.719186] env[63371]: _type = "Task" [ 1477.719186] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.727168] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.736333] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.750965] env[63371]: DEBUG nova.network.neutron [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.879674] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773956, 'name': Rename_Task, 'duration_secs': 0.155122} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.880584] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.880584] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87e25313-79df-44d6-b702-2ded5962c99b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.887302] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1477.887302] env[63371]: value = "task-1773959" [ 1477.887302] env[63371]: _type = "Task" [ 1477.887302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.899110] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.903493] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356, 'name': SearchDatastore_Task, 'duration_secs': 0.024302} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.904393] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eddcfce-a637-4cae-9c6f-b77d171d64c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.910352] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1477.910352] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3" [ 1477.910352] env[63371]: _type = "Task" [ 1477.910352] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.919523] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.962638] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1478.004076] env[63371]: DEBUG nova.scheduler.client.report [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1478.018872] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773957, 'name': PowerOffVM_Task, 'duration_secs': 0.336813} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.019858] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.020052] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.020308] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c03b6ae-ca99-447b-8536-7739a3caa1b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.058431] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.065501] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.065831] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.066339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.066611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.066790] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.068979] env[63371]: INFO nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Terminating instance [ 1478.074274] env[63371]: DEBUG nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1478.074472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.075656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10b1eed-56f7-4300-af1f-2bc2d395e341 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.083965] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.084252] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28a96417-ca08-49e6-8436-13a854336fb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.094646] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1478.094646] env[63371]: value = "task-1773961" [ 1478.094646] env[63371]: _type = "Task" [ 1478.094646] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.095608] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.095807] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.095986] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] 64fc862c-a755-4cac-997b-7a8328638269 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.099221] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-008bf87c-f71f-4de8-b2fb-172b0a077c84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.106330] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.107687] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1478.107687] env[63371]: value = "task-1773962" [ 1478.107687] env[63371]: _type = "Task" [ 1478.107687] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.115343] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.176858] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updating instance_info_cache with network_info: [{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.230242] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.239498] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.254026] env[63371]: INFO nova.compute.manager [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 1.30 seconds to deallocate network for instance. 
[ 1478.398653] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.422252] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3, 'name': SearchDatastore_Task, 'duration_secs': 0.011598} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.422527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.422825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1478.423108] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5050eabc-1823-4e09-8307-d5c71953212e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.430029] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1478.430029] env[63371]: value = "task-1773963" [ 1478.430029] env[63371]: _type = "Task" [ 1478.430029] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.438826] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.514189] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.514189] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1478.516371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.496s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.516644] env[63371]: DEBUG nova.objects.instance [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lazy-loading 'resources' on Instance uuid cfbd0c7c-243e-497a-acb1-ab9323c23574 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1478.606346] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773961, 'name': PowerOffVM_Task, 'duration_secs': 0.21064} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.606485] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.606621] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.606900] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61af4894-ff87-487b-bb8c-f2a7721cd29e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.616469] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171844} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.616708] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.616910] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.617105] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.617313] env[63371]: INFO nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1478.617570] env[63371]: DEBUG oslo.service.loopingcall [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.617766] env[63371]: DEBUG nova.compute.manager [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1478.617880] env[63371]: DEBUG nova.network.neutron [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.680121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.680665] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance network_info: |[{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1478.681323] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:6a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a55b6f2-f084-4989-9b8c-434c1a1deab6', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.689524] env[63371]: DEBUG oslo.service.loopingcall [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.690172] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.690446] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e4f30ba-d30c-4cdb-8ff1-04b92c011d75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.715710] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.716018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.716141] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleting the datastore file [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.716404] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a9c3ff-9819-4ecb-93f9-65be03daa24a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.719922] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.719922] env[63371]: value = "task-1773965" [ 1478.719922] env[63371]: _type = "Task" [ 1478.719922] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.728095] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1478.728095] env[63371]: value = "task-1773966" [ 1478.728095] env[63371]: _type = "Task" [ 1478.728095] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.738327] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task, 'duration_secs': 0.866059} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.738528] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.742203] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.742393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.742681] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafc2e5a-ca16-4b3c-97ff-8341001f41db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.750319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.750623] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.752913] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.756783] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task, 'duration_secs': 5.878851} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.757343] env[63371]: INFO nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Terminating instance [ 1478.759084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.759308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1478.762446] env[63371]: DEBUG nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1478.762653] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.763725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.764448] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dc76ec-a1c5-41a0-ab32-963edc9177b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.774495] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.774807] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cee114bf-6602-419f-ac00-6d158bd3a8c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.783730] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1478.783730] env[63371]: value = "task-1773968" [ 1478.783730] env[63371]: _type = "Task" [ 1478.783730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.799137] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773968, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.842786] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received event network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.843206] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.843476] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.843684] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.843948] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] No waiting events found dispatching network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.844024] env[63371]: WARNING nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received unexpected event network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 for instance with vm_state building and task_state spawning. [ 1478.844226] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received event network-changed-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.844389] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Refreshing instance network info cache due to event network-changed-9a55b6f2-f084-4989-9b8c-434c1a1deab6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1478.844577] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquiring lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.844711] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquired lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.844865] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Refreshing network info cache for port 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.858143] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.858464] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.858680] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.859245] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed6ddaee-fa3e-459a-8fe4-8f1d382ad30f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.867049] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1478.867049] env[63371]: value = "task-1773969" [ 1478.867049] env[63371]: _type = "Task" [ 1478.867049] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.876351] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.899803] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task, 'duration_secs': 0.565865} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.900121] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1478.900478] env[63371]: INFO nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1478.900704] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1478.901815] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7669ebc4-79bc-4f2a-b825-922d09a60a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.941253] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.020820] env[63371]: DEBUG nova.compute.utils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1479.025769] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1479.026786] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1479.031644] env[63371]: DEBUG nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-vif-deleted-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1479.031863] env[63371]: INFO nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Neutron deleted interface 59bb4dc3-13e6-4180-bec1-3a41954f8d62; detaching it from the instance and deleting it from the info cache [ 1479.032566] env[63371]: DEBUG nova.network.neutron [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.094300] env[63371]: DEBUG nova.policy [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1479.236360] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.242393] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.296904] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773968, 'name': PowerOffVM_Task, 'duration_secs': 0.353319} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.299837] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1479.300066] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1479.301522] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24832d49-aa50-4411-8db7-75b714c55e85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.376152] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349644} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.376432] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.376617] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.376788] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.376957] env[63371]: INFO nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1479.377208] env[63371]: DEBUG oslo.service.loopingcall [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.377401] env[63371]: DEBUG nova.compute.manager [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.377493] env[63371]: DEBUG nova.network.neutron [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.389014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1479.390212] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1479.390524] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleting the datastore file [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1479.392620] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4efece65-753a-4526-93e6-4ad77b95228d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.396448] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1479.396448] env[63371]: value = "task-1773971" [ 1479.396448] env[63371]: _type = "Task" [ 1479.396448] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.409965] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.422584] env[63371]: INFO nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 45.13 seconds to build instance. [ 1479.445125] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67037} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.445394] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1479.445606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1479.445835] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f0c5d5-9a9c-40a5-9489-5f409e0f47d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.455793] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1479.455793] env[63371]: value = "task-1773972" [ 1479.455793] env[63371]: _type = "Task" [ 1479.455793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.464824] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.503167] env[63371]: DEBUG nova.network.neutron [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.530618] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1479.542740] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5271884-143e-4de2-96bb-7dc0dd97663c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.552160] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf59b941-ba26-499d-b675-ee2ce972f746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.593354] env[63371]: DEBUG nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Detach interface failed, port_id=59bb4dc3-13e6-4180-bec1-3a41954f8d62, reason: Instance 64fc862c-a755-4cac-997b-7a8328638269 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1479.594540] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully created port: 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1479.650416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb7243d-f5ff-4e92-bc1a-7a390c6329a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.666048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc58988-67a0-4412-af1d-91fdbeb2a111 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.704612] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updated VIF entry in instance network info cache for port 9a55b6f2-f084-4989-9b8c-434c1a1deab6. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.705857] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updating instance_info_cache with network_info: [{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.708084] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b452ba34-eb7a-41a8-bbc6-19a14fffe7bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.716773] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604589ce-3d56-4980-bd2d-6aebee3d3f2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.735335] env[63371]: DEBUG nova.compute.provider_tree [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.742519] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task, 'duration_secs': 0.527978} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.742881] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.745540] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea2026c-4743-4a29-bf19-565cac8b7c61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.752548] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575867} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.754083] env[63371]: INFO nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1479.754261] env[63371]: DEBUG oslo.service.loopingcall [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.754405] env[63371]: DEBUG nova.compute.manager [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.754664] env[63371]: DEBUG nova.network.neutron [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.757631] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1479.757631] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da" [ 1479.757631] env[63371]: _type = "Task" [ 1479.757631] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.766644] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.777219] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.777538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.777756] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.777930] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.778596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.780113] env[63371]: INFO nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Terminating instance [ 1479.781899] env[63371]: DEBUG nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1479.782048] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.782846] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9438631-eb00-4006-a2cd-ac1e2517f6a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.795026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1479.795026] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a3fef41-34fd-4317-b63c-268cec7f3ea6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.799028] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1479.799028] env[63371]: value = "task-1773973" [ 1479.799028] env[63371]: _type = "Task" [ 1479.799028] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.806664] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.907620] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314217} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.908034] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.908247] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.908468] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.908674] env[63371]: INFO nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1479.908918] env[63371]: DEBUG oslo.service.loopingcall [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.909119] env[63371]: DEBUG nova.compute.manager [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.909211] env[63371]: DEBUG nova.network.neutron [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.929763] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.159s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.969156] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214906} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.972128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1479.972976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f92ab6f-2e0b-4be9-9663-8c474b5383b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.998028] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1479.998028] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d9b084d-25d6-4c8c-941a-2bde20ccd278 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.012610] env[63371]: INFO nova.compute.manager [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 1.39 seconds to deallocate network for instance. [ 1480.024247] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1480.024247] env[63371]: value = "task-1773974" [ 1480.024247] env[63371]: _type = "Task" [ 1480.024247] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.034920] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773974, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.119215] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.119350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.119531] env[63371]: DEBUG nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.212240] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Releasing lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.212537] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-vif-deleted-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.244014] env[63371]: DEBUG nova.scheduler.client.report [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.270265] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.270545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.271216] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.271216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.271512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.271512] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1480.272480] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb624a58-24d9-44ee-9037-5caef072d544 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.280577] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1480.280781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1480.281611] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-062edc76-4686-4f8d-a461-a33cd31f36cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.290484] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1480.290484] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db" [ 1480.290484] env[63371]: _type = "Task" [ 1480.290484] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.300828] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.308476] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773973, 'name': PowerOffVM_Task, 'duration_secs': 0.28681} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.308727] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1480.308892] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1480.309151] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7f1e4e5-76f7-4597-aa7e-5aa47b6193cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.340023] env[63371]: DEBUG nova.network.neutron [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.389148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1480.389311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] 
[instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1480.389498] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleting the datastore file [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.390199] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad5501e3-33b5-4447-b1e5-cab58a2a21f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.396429] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1480.396429] env[63371]: value = "task-1773976" [ 1480.396429] env[63371]: _type = "Task" [ 1480.396429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.404665] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773976, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.432852] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1480.521854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.529511] env[63371]: DEBUG nova.network.neutron [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.534689] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773974, 'name': ReconfigVM_Task, 'duration_secs': 0.29383} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.534784] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.535714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2775610b-b2be-4d5d-8570-83102540dd16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.543861] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1480.545866] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1480.545866] env[63371]: value = "task-1773977" [ 1480.545866] env[63371]: _type = "Task" [ 1480.545866] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.554496] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773977, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.564805] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1480.565075] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1480.565262] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.565449] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1480.565595] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.565763] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1480.565972] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1480.566144] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1480.566325] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1480.566515] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1480.566718] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1480.567705] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4e50e3-bd25-491f-a90c-e47fc07abf42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.577021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de87b2a2-b3a4-4ba5-8f77-0c094a153054 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.750490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.234s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.752932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.398s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.754722] env[63371]: INFO nova.compute.claims [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.780783] env[63371]: DEBUG nova.network.neutron [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.800517] env[63371]: INFO nova.scheduler.client.report [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted allocations for instance cfbd0c7c-243e-497a-acb1-ab9323c23574 [ 1480.812729] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db, 'name': SearchDatastore_Task, 'duration_secs': 0.010629} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.815941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f96ad16-8a49-4bd8-9b07-3f215ffe2106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.824926] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1480.824926] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe" [ 1480.824926] env[63371]: _type = "Task" [ 1480.824926] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.833850] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.842711] env[63371]: INFO nova.compute.manager [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 1.47 seconds to deallocate network for instance. [ 1480.908892] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39831} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1480.910440] env[63371]: INFO nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1480.910440] env[63371]: DEBUG oslo.service.loopingcall [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.910440] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1480.910791] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1480.950315] env[63371]: INFO nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Port 8e143eba-fc86-4474-91f7-a5785bb2dbe3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1480.950315] env[63371]: DEBUG nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.958016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.964178] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received 
event network-vif-deleted-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.964537] env[63371]: INFO nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Neutron deleted interface 8e143eba-fc86-4474-91f7-a5785bb2dbe3; detaching it from the instance and deleting it from the info cache [ 1480.965089] env[63371]: DEBUG nova.network.neutron [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.035401] env[63371]: INFO nova.compute.manager [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 1.28 seconds to deallocate network for instance. [ 1481.055950] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773977, 'name': Rename_Task, 'duration_secs': 0.14507} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.058080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.058080] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4058b5a1-9011-4aae-8b64-b414ffe7c2cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.064046] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1481.064046] env[63371]: value = "task-1773978" [ 1481.064046] env[63371]: _type = "Task" [ 1481.064046] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.072403] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.194252] env[63371]: DEBUG nova.compute.manager [req-97d079f7-e491-4cfd-9391-e1419893e5da req-6fff9424-7b3d-4075-9af4-5f2ecf0a2b5a service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-vif-deleted-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.194486] env[63371]: DEBUG nova.compute.manager [req-97d079f7-e491-4cfd-9391-e1419893e5da req-6fff9424-7b3d-4075-9af4-5f2ecf0a2b5a service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-vif-deleted-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.282888] env[63371]: INFO nova.compute.manager [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 1.37 seconds to deallocate network for instance. [ 1481.309724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.306s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.338264] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe, 'name': SearchDatastore_Task, 'duration_secs': 0.436946} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.338604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.338870] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.339146] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dff1a576-9a3d-414c-8e6c-d1dee144bd2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.348898] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1481.348898] env[63371]: value = "task-1773979" [ 1481.348898] env[63371]: _type = "Task" [ 1481.348898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.350193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.358366] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.436991] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully updated port: 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1481.452453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.469243] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2341766f-f18b-489e-9357-602140ab9803 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.479804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ad1dbb-61cd-4481-94ed-d51813a7daed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.520063] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Detach interface failed, port_id=8e143eba-fc86-4474-91f7-a5785bb2dbe3, reason: Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1481.520063] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-vif-deleted-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.541313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.578022] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773978, 'name': PowerOnVM_Task, 'duration_secs': 0.484974} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.578310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1481.578510] env[63371]: INFO nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1481.578700] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.579521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192fd488-9c96-4a32-948d-722ed9de6d7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.620224] env[63371]: DEBUG nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.621083] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0cc905-899b-4492-bd66-08cc0779f006 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.792795] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.867300] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.940489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.940654] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.940802] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1481.958161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.831s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.107152] env[63371]: INFO nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 35.45 seconds to build instance. 
[ 1482.131101] env[63371]: INFO nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] instance snapshotting [ 1482.133516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bbc383-433e-4715-80ae-10217b0d6aef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.161138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36bf799-7b00-43e2-9d78-9e599e9ef8ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.222376] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.282626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767af8e9-1f21-4488-954d-b0c3f28e8965 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.290310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de293364-e202-41b8-b28d-9f4e8f578511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.325855] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee2fd4e-2cbc-4327-86b8-6f402024b076 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.333951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe71efd-7345-4d1b-8790-4027dc8b8a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.347890] env[63371]: DEBUG nova.compute.provider_tree [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.359551] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.489921] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1482.610562] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.948s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.673330] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1482.673330] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-21b7bed1-7e92-416c-8a82-7c1966bfcbf8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.683411] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1482.683411] env[63371]: value = "task-1773980" [ 1482.683411] env[63371]: _type = "Task" [ 1482.683411] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.700190] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.709556] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.724083] env[63371]: INFO nova.compute.manager [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 1.81 seconds to deallocate network for instance. [ 1482.851653] env[63371]: DEBUG nova.scheduler.client.report [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1482.866881] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.369355} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.867190] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1482.867405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1482.867658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-448592eb-cc20-4f65-9737-8e34b9faad39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.875368] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1482.875368] env[63371]: value = "task-1773981" [ 1482.875368] env[63371]: _type = "Task" [ 1482.875368] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.885368] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.115465] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1483.195338] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.214957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.215313] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance network_info: |[{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1483.215739] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d7:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d978143-a770-4100-a97a-b0d9503712e0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1483.223446] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating folder: Project (2a5b81b233f640b186d9798ff57a4945). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1483.224044] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97ff8555-a126-4ad0-803b-34a96e8c958c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.230917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.235140] env[63371]: DEBUG nova.compute.manager [req-4a044077-06b7-4dcc-b3eb-a348884b9eec req-f794de87-a979-4298-a9ea-a108c1424f82 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-deleted-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.236942] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created folder: Project (2a5b81b233f640b186d9798ff57a4945) in parent group-v368199. [ 1483.237152] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating folder: Instances. Parent ref: group-v368324. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1483.237417] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69af363e-97c6-4cfa-884b-e6af55fcc7b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.246442] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created folder: Instances in parent group-v368324. [ 1483.246693] env[63371]: DEBUG oslo.service.loopingcall [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1483.246895] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1483.247110] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49bb16f7-b83c-4183-b33a-3524f74ba6a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.266281] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1483.266281] env[63371]: value = "task-1773984" [ 1483.266281] env[63371]: _type = "Task" [ 1483.266281] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.273949] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.362519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.363257] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1483.368636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.401s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.369068] env[63371]: INFO nova.compute.claims [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.386191] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068384} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.386516] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1483.387595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc974ed7-b227-4dbd-a650-742c327e1ddc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.419937] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1483.421620] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b192858-693c-4d4b-974f-45b84140b7fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.445472] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1483.445472] env[63371]: value = "task-1773985" [ 1483.445472] env[63371]: _type = "Task" [ 1483.445472] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.450981] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.451205] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.451416] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.451612] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.451776] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] No waiting events found dispatching network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1483.451952] env[63371]: WARNING nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received unexpected event network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 for instance with vm_state building and task_state spawning. [ 1483.452110] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-changed-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.452264] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Refreshing instance network info cache due to event network-changed-3d978143-a770-4100-a97a-b0d9503712e0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1483.452446] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.452580] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquired lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.452731] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Refreshing network info cache for port 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.460769] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.639547] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.696817] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.776757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.874932] env[63371]: DEBUG nova.compute.utils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.885085] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1483.885085] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1483.937787] env[63371]: DEBUG nova.policy [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e4bf6cfe9124f3a9ea2df44c43611f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3df339d9a704d9b9bebecac3871584c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1483.959154] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.198972] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task, 'duration_secs': 1.101817} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.199790] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1484.200717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae0ca22-ef9d-4d63-b735-057841483fd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.277332] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task, 'duration_secs': 0.663234} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.277512] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1484.278213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.278451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.278680] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1484.278941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dbea797-798b-4a2d-91af-ec31be17f030 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.285454] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1484.285454] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051" [ 1484.285454] env[63371]: _type = "Task" [ 1484.285454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.294415] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.331736] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Successfully created port: 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.337470] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updated VIF entry in instance network info cache for port 3d978143-a770-4100-a97a-b0d9503712e0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1484.337823] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.383200] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1484.468131] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task, 'duration_secs': 0.85} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.468131] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Reconfigured VM instance instance-0000002d to attach disk [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.468369] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3ce6316-1882-4481-a55d-c190842c0693 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.475681] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1484.475681] env[63371]: value = "task-1773986" [ 1484.475681] env[63371]: _type = "Task" [ 1484.475681] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.494215] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773986, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.719624] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1484.722493] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c5da356a-8e72-483e-b906-ef58c3847724 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.730687] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1484.730687] env[63371]: value = "task-1773987" [ 1484.730687] env[63371]: _type = "Task" [ 1484.730687] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.739052] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.797687] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051, 'name': SearchDatastore_Task, 'duration_secs': 0.01065} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.800545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.800867] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.802337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.802337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.802337] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.802337] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb28c6e9-b34f-4b7d-8d23-d27444797189 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.811350] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.811679] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1484.814744] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b33a150-faad-4409-93f5-e6e0ca96de5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.821150] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1484.821150] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9" [ 1484.821150] env[63371]: _type = "Task" [ 1484.821150] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.837401] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.840389] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Releasing lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.840664] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.841179] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1484.841179] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.841179] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.841490] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1484.945144] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b994ff-a105-4aab-bb79-43671c66a55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.952422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9b9d5a-50c0-4c0e-b9ae-a98a8e673bfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.989078] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b43d5ca-8215-4696-9995-783b9a4b1376 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.997493] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773986, 'name': Rename_Task, 'duration_secs': 0.267076} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.001663] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.001663] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14458ede-0479-41f0-abaf-0378a23ab8b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.003075] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456939b1-bca4-46fb-9b71-d0ea6f8e4f1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.018865] env[63371]: DEBUG nova.compute.provider_tree [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.021452] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1485.021452] env[63371]: value = "task-1773988" [ 1485.021452] env[63371]: _type = "Task" [ 1485.021452] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.029860] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.241876] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.334147] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9, 'name': SearchDatastore_Task, 'duration_secs': 0.030177} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.335347] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107d4255-8403-4faf-b6eb-d3fe32943c9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.341141] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1485.341141] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b" [ 1485.341141] env[63371]: _type = "Task" [ 1485.341141] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.351395] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.398633] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1485.425891] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1485.426193] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1485.426381] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1485.426611] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1485.426797] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1485.426976] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1485.427414] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1485.427615] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1485.427905] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1485.428456] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1485.428456] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1485.429713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe41789a-c83d-4f2a-9f94-9b9ef108e408 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.443598] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a3f9cf-ee21-43d6-a506-e14c39b97c44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.523746] env[63371]: DEBUG nova.scheduler.client.report [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1485.536551] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.614834] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1485.615112] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1485.615353] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.678323] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.678714] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.745012] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.853470] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b, 'name': SearchDatastore_Task, 'duration_secs': 0.040604} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.854052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.854586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.855317] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e126883f-73f9-4690-8b41-e07f8874c7f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.862633] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1485.862633] env[63371]: value = "task-1773989" [ 1485.862633] env[63371]: _type = "Task" [ 1485.862633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.871799] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773989, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.012445] env[63371]: DEBUG nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.012445] env[63371]: DEBUG nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] No waiting events found dispatching network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1486.012445] env[63371]: WARNING nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received unexpected event network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 for instance with vm_state building and task_state spawning. [ 1486.035019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.035019] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1486.036402] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.192s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.038156] env[63371]: INFO nova.compute.claims [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.047313] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task, 'duration_secs': 0.697824} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.047892] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.048235] env[63371]: INFO nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 9.98 seconds to spawn the instance on the hypervisor. 
[ 1486.048553] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1486.050345] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06a9b03-1258-4540-9aca-c76e591bc306 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.136819] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Successfully updated port: 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.185562] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.185562] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.185562] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.250493] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.375952] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773989, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482084} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.376359] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.376606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.376919] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06d4df2a-56bd-4520-9a62-026636cdc780 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.385451] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1486.385451] env[63371]: value = "task-1773990" [ 1486.385451] env[63371]: _type = "Task" [ 1486.385451] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.395976] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773990, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.545976] env[63371]: DEBUG nova.compute.utils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.547806] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1486.548021] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1486.571355] env[63371]: INFO nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 34.75 seconds to build instance. 
[ 1486.626260] env[63371]: DEBUG nova.policy [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e88115fd7e2541e08000e93ef9ab0524', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6919de51a2ef456db7a25d4cec1e26ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1486.639126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.639316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.639496] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1486.744621] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.895426] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177024} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.895673] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.896493] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f425d77f-a287-4ca2-8324-513073a1d4d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.919591] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1486.919869] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abe44092-5d72-4f02-8cc3-69830053aa3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.943341] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1486.943341] env[63371]: value = "task-1773991" [ 1486.943341] env[63371]: _type = "Task" [ 1486.943341] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.952296] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773991, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.008666] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.008808] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.023744] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.051719] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1487.073511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.971s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.184966] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1487.249526] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task, 'duration_secs': 2.195936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.252868] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created linked-clone VM from snapshot [ 1487.254936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118caf6f-e2a5-4354-8cc7-cb4e7f6f339b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.264525] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploading image f30456ca-8289-4e09-8051-20b3017ac4ca {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1487.289320] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1487.289320] env[63371]: value = "vm-368328" [ 1487.289320] env[63371]: _type = "VirtualMachine" [ 1487.289320] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1487.289605] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c3a95de3-19bb-4271-82ff-a025a9eef000 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.296073] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease: (returnval){ [ 1487.296073] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.296073] env[63371]: _type = "HttpNfcLease" [ 1487.296073] env[63371]: } obtained for exporting VM: (result){ [ 1487.296073] env[63371]: value = "vm-368328" [ 1487.296073] env[63371]: _type = "VirtualMachine" [ 1487.296073] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1487.296380] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the lease: (returnval){ [ 1487.296380] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.296380] env[63371]: _type = "HttpNfcLease" [ 1487.296380] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1487.306045] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1487.306045] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.306045] env[63371]: _type = "HttpNfcLease" [ 1487.306045] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1487.344411] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.400241] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.453593] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773991, 'name': ReconfigVM_Task, 'duration_secs': 0.278961} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.456199] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.457205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc80a647-043d-48e4-8eb5-557eed3144b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.463822] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1487.463822] env[63371]: value = "task-1773993" [ 1487.463822] env[63371]: _type = "Task" [ 1487.463822] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.473601] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773993, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.512966] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.512966] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.512966] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1487.513150] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.513150] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.513303] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.556427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff9bde4-3f37-4363-8605-5ac0567b58e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.568516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d34b1a2-af0f-4aba-88ef-5ab9cbf8addd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.607327] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1487.610340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.610610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.610822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.610994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.611179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.613719] env[63371]: INFO nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Terminating instance [ 1487.615868] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d037e767-3a90-460d-a8cb-d180c52deb51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.619163] env[63371]: DEBUG nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1487.619462] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1487.620277] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992c51e2-0cae-4f17-b246-0bbafdcbc0e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.631198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a547fb7-ffa2-43de-a15f-077ffe394a82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.635888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.636109] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d6d0872-d732-43b9-8752-a8c5056e82ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.650983] env[63371]: DEBUG nova.compute.provider_tree [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.654315] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1487.654315] env[63371]: value = "task-1773994" [ 1487.654315] env[63371]: _type = "Task" [ 1487.654315] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.666872] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.805582] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1487.805582] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.805582] env[63371]: _type = "HttpNfcLease" [ 1487.805582] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1487.805582] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1487.805582] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.805582] env[63371]: _type = "HttpNfcLease" [ 1487.805582] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1487.807074] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3ba7f9-da21-4c30-8dce-e1f7180b510e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.812989] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1487.813307] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1487.872016] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.907069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.907069] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance network_info: |[{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1487.907069] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:89:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '993ff886-27f6-48cd-be00-f0e8d292b060', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1487.912094] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating folder: Project (f3df339d9a704d9b9bebecac3871584c). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.912446] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a5097d9-9f47-4612-932a-4690e68440fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.922895] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created folder: Project (f3df339d9a704d9b9bebecac3871584c) in parent group-v368199. [ 1487.923111] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating folder: Instances. Parent ref: group-v368329. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.923372] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-713276ab-e17b-4f58-b7e9-dd4984ca6ab7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.931623] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created folder: Instances in parent group-v368329. [ 1487.931871] env[63371]: DEBUG oslo.service.loopingcall [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.932070] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1487.932287] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce8f153e-307a-453b-8b96-d8c436a7fa3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.952926] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7ae84007-f576-49ba-a46c-6fbab18482cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.956830] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1487.956830] env[63371]: value = "task-1773997" [ 1487.956830] env[63371]: _type = "Task" [ 1487.956830] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.967930] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773997, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.977022] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773993, 'name': Rename_Task, 'duration_secs': 0.145549} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.981021] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.981021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-540126dd-2664-479e-8a2b-985976982696 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.987358] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1487.987358] env[63371]: value = "task-1773998" [ 1487.987358] env[63371]: _type = "Task" [ 1487.987358] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.004453] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.066223] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1488.094470] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.094706] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.094851] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 
tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.096317] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.096317] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.097192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb545a1c-ef48-42d0-bc97-59ae92cf1ad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.106334] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6cfaae-b9da-47df-94f7-53d1d0aa09fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.153797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.156208] env[63371]: DEBUG nova.scheduler.client.report [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1488.171899] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773994, 'name': PowerOffVM_Task, 'duration_secs': 0.45618} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.172178] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.172341] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1488.172585] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6df90de-c58c-4a45-8d24-c6f2795d14ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.254026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1488.254647] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1488.254765] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleting the datastore file [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1488.255093] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48a23219-9cdf-4238-9858-b3296e4de8de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.264292] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1488.264292] env[63371]: value = "task-1774000" [ 1488.264292] env[63371]: _type = "Task" [ 1488.264292] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.272557] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.277607] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.277870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.364843] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updated VIF entry in instance network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.365233] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.467120] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773997, 'name': CreateVM_Task, 'duration_secs': 0.369673} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.467413] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1488.467982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.468376] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.468508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1488.468725] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef0fb30-7634-4141-a2bc-a4e487c5d802 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.473470] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1488.473470] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088" [ 1488.473470] env[63371]: _type = "Task" [ 1488.473470] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.481893] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.500307] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.575585] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1488.575912] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing instance network info cache due to event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1488.576191] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.576398] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.576974] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.662656] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.663580] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1488.669717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.611s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.670040] env[63371]: DEBUG nova.objects.instance [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lazy-loading 'resources' on Instance uuid e00c2e45-b8bc-440b-8b58-a21f127192c7 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.775215] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227808} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1488.778749] env[63371]: INFO nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1488.778749] env[63371]: DEBUG oslo.service.loopingcall [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1488.778749] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1488.778749] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1488.869322] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.987950] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088, 'name': SearchDatastore_Task, 'duration_secs': 0.012417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.988490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.988745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.988973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.989412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.989629] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1488.989937] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f8442be-b3e1-4ab2-b0d8-f49586591c45 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.004068] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task, 'duration_secs': 0.555855} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.006281] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1489.009805] env[63371]: INFO nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1489.009805] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1489.009805] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1489.009805] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1489.009805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626e0b11-b1c5-4644-a7f2-95454763f270 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.011500] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3f41e0-852f-4097-a395-50adeeca53c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.021920] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.021920] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52292885-3021-8130-5634-cec2137e330f" [ 1489.021920] env[63371]: _type = "Task" [ 1489.021920] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.033491] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52292885-3021-8130-5634-cec2137e330f, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.034505] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e950d806-9f4f-47ff-b7ae-da9f8c6b45b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.040255] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.040255] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011" [ 1489.040255] env[63371]: _type = "Task" [ 1489.040255] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.050259] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.171396] env[63371]: DEBUG nova.compute.utils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1489.173928] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1489.173928] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.312042] env[63371]: DEBUG nova.policy [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6aa709a53564231ac25fb3e878239ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76a64c712ca4aa98c19600ef0469855', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1489.453429] env[63371]: DEBUG nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-vif-deleted-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.453683] env[63371]: INFO nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Neutron deleted interface 96760ebc-7de4-48e4-94ac-f0a3a2eab943; detaching it from the instance and deleting it from the info cache [ 1489.453936] env[63371]: DEBUG nova.network.neutron [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.514992] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updated VIF entry in instance network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.515524] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.536577] env[63371]: INFO nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 32.87 seconds to build instance. [ 1489.556772] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011, 'name': SearchDatastore_Task, 'duration_secs': 0.010553} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.562046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.562409] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1489.563695] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69ae438d-efe4-4841-9266-30e2b90a35de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.572708] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.572708] env[63371]: value = "task-1774001" [ 1489.572708] env[63371]: _type = "Task" [ 1489.572708] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.588841] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.649581] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.676694] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1489.790433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d92888c-7c60-4e0e-a41c-a717e0d6edd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.803108] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7de8f29-f9bb-44f7-b143-603b70671061 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.812649] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Successfully created port: e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1489.844795] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d27898-3f42-4c6f-80ae-c5c7215e71b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.855030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c6497c-aa2f-426a-99d7-c8a35f257fa6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.873181] env[63371]: DEBUG nova.compute.provider_tree [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.959798] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d90381f-49a6-46db-ac92-006796b2103c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.970319] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f0cc14-3bb0-4bcd-995d-c2db5d0b0e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.010211] env[63371]: DEBUG nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Detach interface failed, port_id=96760ebc-7de4-48e4-94ac-f0a3a2eab943, reason: Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1490.019087] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.019375] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.020023] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.020996] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.021204] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.021378] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.040436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.184s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.089573] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.125472] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1490.154716] env[63371]: INFO nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.38 seconds to deallocate network for instance. [ 1490.376838] env[63371]: DEBUG nova.scheduler.client.report [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1490.539739] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.543178] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1490.588221] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.588522] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1490.588707] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1490.588971] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40c97e6c-2a0c-46c6-99f2-6114352ec671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.596507] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1490.596507] env[63371]: value = "task-1774002" [ 1490.596507] env[63371]: _type = "Task" [ 1490.596507] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.608917] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.661342] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.663815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.689334] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1490.714895] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1490.715480] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1490.715480] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.715619] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1490.715658] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.715815] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1490.716040] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1490.716203] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1490.716367] env[63371]: 
DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1490.716524] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1490.716690] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1490.717561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa7e56a-785d-4c43-ac2b-77eff8c47707 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.725757] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07918d7d-6ebe-40df-9929-e6c53c0069e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.882237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.886093] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.886093] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.886093] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.886537] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.886537] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.887743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.618s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.888101] env[63371]: DEBUG nova.objects.instance [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lazy-loading 'resources' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1490.916687] env[63371]: INFO nova.scheduler.client.report [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleted allocations for instance e00c2e45-b8bc-440b-8b58-a21f127192c7 [ 1491.066691] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.111453] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073431} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.112390] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1491.112856] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac73b958-3743-474c-90d1-dc1c4b6c3ffe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.137717] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1491.137717] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cd0369a-6a07-4cd0-9053-8977201987a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.159662] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1491.159662] env[63371]: value = "task-1774003" [ 1491.159662] env[63371]: _type = "Task" [ 1491.159662] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.165609] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.171732] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774003, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.234466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.235475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.235868] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1491.237356] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0c257-dd72-44a2-a921-3bb0afd6c02a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.245610] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1491.246631] env[63371]: DEBUG nova.objects.instance [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'flavor' on Instance uuid 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1491.264834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.432199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.840s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.518406] env[63371]: DEBUG nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received event network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1491.519132] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.519316] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.519495] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.519666] env[63371]: DEBUG nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] No waiting events found dispatching network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1491.519826] env[63371]: WARNING nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received unexpected event network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 for instance with vm_state building and task_state spawning. [ 1491.674959] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774003, 'name': ReconfigVM_Task, 'duration_secs': 0.359094} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.675313] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfigured VM instance instance-0000002f to attach disk [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1491.676050] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf9ae12e-8bbe-41cd-97c8-8d2bd9f6f700 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.685382] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1491.685382] env[63371]: value = "task-1774004" [ 1491.685382] env[63371]: _type = "Task" [ 1491.685382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.699933] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774004, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.751888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.752204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac44a05f-1ac7-4df0-a60b-4a9c8777e43f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.759970] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1491.759970] env[63371]: value = "task-1774005" [ 1491.759970] env[63371]: _type = "Task" [ 1491.759970] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.769166] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774005, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.815011] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Successfully updated port: e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.836207] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.836915] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.001797] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9447b2-f2b3-4b7e-a574-cfd2f5673a89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.013513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac6432f-d344-4b85-a6f2-5a1dac2c88a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.044577] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7605d7ff-d812-446f-9989-27482a768043 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.052772] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8963124f-421b-4324-bacd-16e584b9a670 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.067855] env[63371]: DEBUG nova.compute.provider_tree [None 
req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.200506] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774004, 'name': Rename_Task, 'duration_secs': 0.163363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.200786] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1492.201066] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b5edba7-13fc-4a0b-beb8-7ac86f4798ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.207308] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1492.207308] env[63371]: value = "task-1774006" [ 1492.207308] env[63371]: _type = "Task" [ 1492.207308] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.219627] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.269803] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774005, 'name': PowerOffVM_Task, 'duration_secs': 0.236749} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.270056] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1492.270238] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.271054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f2de0-2d51-4dab-a0a4-469e5e2f1f72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.317936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.318174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.318286] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.343325] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.343549] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.343746] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.343941] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.344115] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.344280] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.344443] env[63371]: WARNING nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 for instance with vm_state building and task_state spawning. [ 1492.344599] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.344828] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1492.345067] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.345215] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.345385] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.408609] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.573030] env[63371]: DEBUG nova.scheduler.client.report [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1492.718194] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774006, 'name': PowerOnVM_Task, 'duration_secs': 0.50165} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.718530] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1492.718740] env[63371]: INFO nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 7.32 seconds to spawn the instance on the hypervisor. 
[ 1492.718930] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.719856] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187bc59f-89ab-4cd7-9b48-0fdedcdc47f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.789904] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.553s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.868331] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.079294] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.082047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.980s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.082275] env[63371]: DEBUG nova.objects.instance [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid b5e259ea-d103-41c6-84b3-748813bb514d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.103486] env[63371]: INFO nova.scheduler.client.report [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleted allocations for instance cbcdfe1a-86a4-4a12-99b5-44d291d41769 [ 1493.143867] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.215904] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.216327] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.237733] env[63371]: INFO nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 34.91 seconds to build instance. 
[ 1493.615864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.888s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.653516] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.653833] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance network_info: |[{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1493.654299] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:1c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e13a7d6d-6643-4b64-a4b1-2a59397c5307', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.666061] env[63371]: DEBUG oslo.service.loopingcall [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.669825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.670362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-118fff70-a944-4564-ba07-3771ed0fb23c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.693998] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.693998] env[63371]: value = "task-1774007" [ 1493.693998] env[63371]: _type = "Task" [ 1493.693998] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.702680] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774007, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.719690] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.719962] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.720250] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-2691ba66-0c30-4f84-af20-63a2d5a37564. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.720335] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.720471] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.720663] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.742793] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.229s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.796823] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received event network-changed-e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.796954] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Refreshing instance network info cache due to event network-changed-e13a7d6d-6643-4b64-a4b1-2a59397c5307. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.797149] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.797289] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.797464] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Refreshing network info cache for port e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.120081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739f537b-62ee-4041-8a63-b5994f701e87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.129192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19333a56-fd62-4c09-a159-1852e72a43f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.167417] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d4b82d-9906-44a2-94b8-e107a1b2f12c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.178719] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75ba0c5-dc6b-453d-bff9-d64f9d48f05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.195862] env[63371]: DEBUG nova.compute.provider_tree [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.210917] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774007, 'name': CreateVM_Task, 'duration_secs': 0.514428} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.211907] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.212718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.212898] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.213252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.213769] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de625ae-cf64-4b40-8973-640988932a21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.219539] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1494.219539] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd" [ 1494.219539] env[63371]: _type = "Task" [ 1494.219539] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.236555] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.280641] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.413144] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.702136] env[63371]: DEBUG nova.scheduler.client.report [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1494.731021] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd, 'name': SearchDatastore_Task, 'duration_secs': 0.017729} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.731375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.731555] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.731830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.732062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.732189] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.732469] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bac07b53-2e6b-4ad7-a9b1-3f7e4c1f989e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.741750] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.741957] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.742733] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba6ae100-49a9-405e-b01d-af00a1c12342 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.749106] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1494.749106] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87" [ 1494.749106] env[63371]: _type = "Task" [ 1494.749106] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.758194] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.775577] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updated VIF entry in instance network info cache for port e13a7d6d-6643-4b64-a4b1-2a59397c5307. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.775943] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.918202] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.033764] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.207961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.211936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.075s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.212968] env[63371]: DEBUG nova.objects.instance [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lazy-loading 'resources' on Instance uuid 201a2d1e-9e2c-4c07-92be-200408874ad4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.219105] env[63371]: 
DEBUG nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.220022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe96c7e1-81ab-497b-a8a8-a1ed668a39f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.242360] env[63371]: INFO nova.scheduler.client.report [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance b5e259ea-d103-41c6-84b3-748813bb514d [ 1495.265073] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87, 'name': SearchDatastore_Task, 'duration_secs': 0.017189} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.266019] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2100890c-7a5b-4450-9044-9d1474c18b13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.272395] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1495.272395] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b" [ 1495.272395] env[63371]: _type = "Task" [ 1495.272395] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.281932] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.282362] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.282647] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.282948] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.283175] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.283990] env[63371]: WARNING nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 for instance with vm_state building and task_state spawning. [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-0fad7cfe-3cf5-4996-9d68-50d1919577c7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.284344] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.284413] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.284614] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.285814] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.539510] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.735367] env[63371]: INFO nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] instance snapshotting [ 1495.735734] env[63371]: WARNING nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1495.741521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a899a62f-82cc-465b-a0e2-b6f9b01810a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.758243] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.079s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.785181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e42fa9b-e80c-430a-8c04-d364b42f3bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.806884] env[63371]: DEBUG oslo_vmware.api [None 
req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b, 'name': SearchDatastore_Task, 'duration_secs': 0.013455} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.813963] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.815224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.815454] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ab7e5e7-ef1b-4ea2-b33d-8cc8fe7c1106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.824776] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1495.824776] env[63371]: value = "task-1774008" [ 1495.824776] env[63371]: _type = "Task" [ 1495.824776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.835381] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.839136] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.839136] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing instance network info cache due to event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.839136] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.839136] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.839136] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.910066] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1496.051520] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.294298] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf20b37-85e5-4445-8134-88819a56d172 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.303028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf439fa7-67a2-4bd7-8d96-a3e952c1a5d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.338399] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1496.339569] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bbaadeda-ccf7-4487-8b07-9beaf1ccd439 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.346022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257ae675-3e20-4f09-bfa4-ed203b956821 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.357827] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.362298] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1496.362298] env[63371]: value = "task-1774009" [ 1496.362298] env[63371]: _type = "Task" [ 1496.362298] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.364097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba50473-87a1-4fe8-bc77-e0e0dc9719ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.391673] env[63371]: DEBUG nova.compute.provider_tree [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.394235] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.554211] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.554704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.554883] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.724568] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1496.725509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f12c156-fc0c-457c-9f91-57a1a6fa45e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.732454] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1496.732454] env[63371]: ERROR oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk due to incomplete transfer. [ 1496.732454] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d100461f-e1b4-4606-b81c-9a86bf8e16d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.740502] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1496.740502] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploaded image f30456ca-8289-4e09-8051-20b3017ac4ca to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1496.741573] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1496.741830] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a05d5cdf-b6bc-4396-991c-53f13a9e4f54 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.747782] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1496.747782] env[63371]: value = "task-1774010" [ 1496.747782] env[63371]: _type = "Task" [ 1496.747782] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.755518] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774010, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.853739] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711668} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.853739] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.853982] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.854239] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ff61186-22d9-408b-83f1-f4e84fd9e9b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.861997] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1496.861997] env[63371]: value = "task-1774011" [ 1496.861997] env[63371]: _type = "Task" [ 1496.861997] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.878447] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.880097] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.895456] env[63371]: DEBUG nova.scheduler.client.report [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.941487] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updated VIF entry in instance network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.943456] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.097155] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1497.264827] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774010, 'name': Destroy_Task, 'duration_secs': 0.435952} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.265415] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroyed the VM [ 1497.265814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1497.266890] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5bd59f4b-e48f-4fee-8c57-a8e82d2f59ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.273862] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1497.273862] env[63371]: value = "task-1774012" [ 1497.273862] env[63371]: _type = "Task" [ 1497.273862] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.284166] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.377105] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.381814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1497.382281] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task, 'duration_secs': 0.746016} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.385395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eaf231-49bb-423b-8119-83fb45824c8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.388619] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1497.390976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fe5e7b-3ed8-478c-8c9f-3c1a10abbe4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.407305] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.423734] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.427719] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.985s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.427950] env[63371]: DEBUG nova.objects.instance [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid 855005ae-3b0e-4ad7-80cf-266075fc6d0f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1497.429327] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7da1c4-16d2-4e12-9a0b-1fa53cc469ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.452892] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.453173] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received 
event network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.453382] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.453544] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.453698] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.453870] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1497.454175] env[63371]: WARNING nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff for instance with vm_state building and task_state spawning. [ 1497.454175] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.454333] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-f99cf773-dc88-4581-961d-63fdebbf96ff. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1497.454495] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.460966] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1497.460966] env[63371]: value = "task-1774013" [ 1497.460966] env[63371]: _type = "Task" [ 1497.460966] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.472719] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.472719] env[63371]: INFO nova.scheduler.client.report [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted allocations for instance 201a2d1e-9e2c-4c07-92be-200408874ad4 [ 1497.784719] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.807685] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": 
"0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.962702] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1497.963894] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fbd5307f-352b-425b-b5f5-116c2e79d90e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.983202] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774013, 'name': ReconfigVM_Task, 'duration_secs': 0.403547} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.983358] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1497.983358] env[63371]: value = "task-1774014" [ 1497.983358] env[63371]: _type = "Task" [ 1497.983358] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.983774] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.178s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.984795] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.985471] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe98db69-a1fd-4ef6-9a02-0466c1bbb15f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.000790] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 11%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.005115] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1498.005115] env[63371]: value = "task-1774015" [ 1498.005115] env[63371]: _type = "Task" [ 1498.005115] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.045870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71edb401-b664-44d1-9aa0-3b156eb85a77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.060255] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307ebae5-524a-43ff-89b0-0356a17f48e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.094033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e064ad7-0dfb-4070-b27c-b94f2cc1f831 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.102813] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9383297-e948-4b70-aecd-878529e9cab1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.118192] env[63371]: DEBUG nova.compute.provider_tree [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.287582] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task, 'duration_secs': 0.682545} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.287582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1498.287582] env[63371]: INFO nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 16.15 seconds to snapshot the instance on the hypervisor. 
[ 1498.313023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.313023] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance network_info: |[{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1498.313023] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.313023] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1498.313362] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:f0:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2691ba66-0c30-4f84-af20-63a2d5a37564', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:df:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0fad7cfe-3cf5-4996-9d68-50d1919577c7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:be:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f99cf773-dc88-4581-961d-63fdebbf96ff', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.324124] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating folder: Project (6919de51a2ef456db7a25d4cec1e26ad). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.328149] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4556ad0c-cb95-4de3-b876-7a1f190b32e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.336198] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created folder: Project (6919de51a2ef456db7a25d4cec1e26ad) in parent group-v368199. 
[ 1498.336198] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating folder: Instances. Parent ref: group-v368335. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.336354] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-866183f7-d798-4e3b-87e5-eeb08f7cdf81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.346488] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created folder: Instances in parent group-v368335. [ 1498.346761] env[63371]: DEBUG oslo.service.loopingcall [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.347245] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1498.347461] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85aa54b0-e32c-40aa-92b3-f9028c2a4ffa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.371318] env[63371]: INFO nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Terminating instance [ 1498.374587] env[63371]: DEBUG nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1498.375023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.376357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dd6486-b426-44e5-a7c6-4d886d8fb218 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.379903] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1498.379903] env[63371]: value = "task-1774018" [ 1498.379903] env[63371]: _type = "Task" [ 1498.379903] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.386202] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.387020] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48dc1638-cd68-4142-8aa4-ab6758cac91d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.392453] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.393849] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1498.393849] env[63371]: value = "task-1774019" [ 1498.393849] env[63371]: _type = "Task" [ 1498.393849] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.401876] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.496544] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.516533] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774015, 'name': Rename_Task, 'duration_secs': 0.304717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.516872] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1498.517013] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2691f08e-d0bb-4548-90fb-359cc0a95925 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.523412] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1498.523412] env[63371]: value = "task-1774020" [ 1498.523412] env[63371]: _type = "Task" [ 1498.523412] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.536242] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.553983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.554354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.554610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.554842] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.555466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.557177] env[63371]: INFO nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Terminating instance [ 1498.559909] env[63371]: DEBUG nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1498.560114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.561113] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16135074-1976-444f-ba0e-ca8ff06a5126 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.569825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.570010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3fdd078-572d-4c96-afcf-090ab9740068 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.577162] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1498.577162] env[63371]: value = "task-1774021" [ 1498.577162] env[63371]: _type = "Task" [ 1498.577162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.587392] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.621844] env[63371]: DEBUG nova.scheduler.client.report [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1498.896845] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.906506] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774019, 'name': PowerOffVM_Task, 'duration_secs': 0.329697} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.906940] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.907268] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.907660] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af1c6d9c-6d02-49fe-a7b8-d080f214a389 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.995968] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.996164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.996415] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.000410] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c6066d1-0fda-4949-96cd-1d2ad77e0973 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.002697] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.008955] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1499.008955] env[63371]: value = "task-1774023" [ 1499.008955] env[63371]: _type = "Task" [ 1499.008955] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.016085] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.039143] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.092526] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774021, 'name': PowerOffVM_Task, 'duration_secs': 0.186359} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.092914] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1499.093028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.093340] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01237cc5-8628-4032-a356-61945c32a5d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.128223] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.130660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 30.610s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.153458] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updated VIF entry in instance network info cache for port f99cf773-dc88-4581-961d-63fdebbf96ff. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.155636] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", 
"segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.171551] env[63371]: INFO nova.scheduler.client.report [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 855005ae-3b0e-4ad7-80cf-266075fc6d0f [ 1499.222902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.223171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.226217] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf97460a-27b4-4218-b9ab-a400bb809fe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.233439] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1499.233439] env[63371]: value = "task-1774025" [ 1499.233439] env[63371]: _type = "Task" [ 1499.233439] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.247765] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.398657] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task, 'duration_secs': 0.721836} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.398833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1499.400135] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.400448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.400925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1499.401329] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b392abd-ee87-47c2-a56d-1049e70d8678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.408176] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1499.408176] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85" [ 1499.408176] env[63371]: _type = "Task" [ 1499.408176] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.421041] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.504234] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task, 'duration_secs': 1.406816} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.504234] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created linked-clone VM from snapshot [ 1499.504315] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ca6079-474c-4f97-bfdd-cd4c25817791 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.514875] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Uploading image 6287f359-692e-438d-8347-f0d2b27b0f80 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1499.522178] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291218} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.522427] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.522602] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.522769] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.522933] env[63371]: INFO nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1499.523188] env[63371]: DEBUG oslo.service.loopingcall [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.524343] env[63371]: DEBUG nova.compute.manager [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.524343] env[63371]: DEBUG nova.network.neutron [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.540984] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task, 'duration_secs': 0.611429} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.540984] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1499.540984] env[63371]: INFO nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 8.85 seconds to spawn the instance on the hypervisor. [ 1499.540984] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1499.541820] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50690f67-221f-48ef-869d-3299ba4c2fe1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.546964] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1499.546964] env[63371]: value = "vm-368334" [ 1499.546964] env[63371]: _type = "VirtualMachine" [ 1499.546964] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1499.547205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50d5ff3b-362a-4473-9cc5-bdc506a44410 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.558273] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease: (returnval){ [ 1499.558273] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.558273] env[63371]: _type = "HttpNfcLease" [ 1499.558273] env[63371]: } obtained for exporting VM: (result){ [ 1499.558273] env[63371]: value = "vm-368334" [ 1499.558273] env[63371]: _type = "VirtualMachine" [ 1499.558273] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1499.558273] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the lease: (returnval){ [ 1499.558273] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.558273] env[63371]: _type = "HttpNfcLease" [ 1499.558273] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1499.570753] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1499.570753] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.570753] env[63371]: _type = "HttpNfcLease" [ 1499.570753] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1499.659453] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.685897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.794s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.726300] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1499.744920] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36188} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.745120] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.745308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.745483] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.745650] env[63371]: INFO nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1499.745890] env[63371]: DEBUG oslo.service.loopingcall [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.746088] env[63371]: DEBUG nova.compute.manager [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.746183] env[63371]: DEBUG nova.network.neutron [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.921744] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85, 'name': SearchDatastore_Task, 'duration_secs': 0.024095} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.922196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.922322] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1499.922528] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.922671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.922878] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.923154] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86de98df-e8b8-4677-a6df-4a04263b8781 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.933184] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.933379] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1499.934230] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c99b54-7dcb-40f6-99ca-d90666a8b77f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.941542] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1499.941542] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d" [ 1499.941542] env[63371]: _type = "Task" [ 1499.941542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.952149] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.075535] env[63371]: INFO nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 40.27 seconds to build instance. [ 1500.078026] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1500.078026] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1500.078026] env[63371]: _type = "HttpNfcLease" [ 1500.078026] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1500.078518] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1500.078518] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1500.078518] env[63371]: _type = "HttpNfcLease" [ 1500.078518] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1500.079238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7110824e-8825-4fa2-8527-fc8629409363 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.086609] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1500.086786] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1500.183020] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 47c1c242-d190-4523-8033-307c5a9b7535 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183231] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.183355] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183482] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e0369f27-68ea-49c4-8524-3dbbb3cde96e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183604] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e912c210-3ae1-47ce-b9cd-afebf6195606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183713] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 76c861a7-30f2-40f4-b723-7912975f36f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183818] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183950] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184082] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184217] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184344] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 36b81143-211f-4c77-854b-abe0d3f39ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 50d5eac1-0752-4089-948c-b04439df6f6c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184879] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance be37eb1c-8582-4446-afd6-ae11a8cadf95 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1500.186191] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 64fc862c-a755-4cac-997b-7a8328638269 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.186191] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dcf8063b-56eb-439c-bee5-139a1e157714 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (c079ebb1-2fa2-4df9-bdab-118e305653c1): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186470] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 594ff846-8e3e-4882-8ddc-41f824a77a5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186470] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186540] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186620] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.191314] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-535ea6bb-28b9-4472-b7b7-a4ef442dac6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.227575] env[63371]: DEBUG nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-vif-deleted-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1500.227775] env[63371]: INFO nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Neutron deleted interface d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9; detaching it from the instance and deleting it from the info cache [ 1500.227947] env[63371]: DEBUG nova.network.neutron [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.252037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.406014] env[63371]: DEBUG nova.network.neutron [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.457226] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d, 'name': SearchDatastore_Task, 'duration_secs': 0.032676} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.457226] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec539232-5788-4dcc-9c45-8b2ee96a5056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.466880] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1500.466880] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3" [ 1500.466880] env[63371]: _type = "Task" [ 1500.466880] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.478897] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.579606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.136s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.672444] env[63371]: DEBUG nova.network.neutron [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.692797] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1500.733130] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aafe4311-98ec-4926-bc05-3585727ae3f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.743059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35acf661-79fc-4bc0-8b96-2a2b819998d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.782028] env[63371]: DEBUG nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Detach interface failed, port_id=d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9, reason: Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1500.909352] env[63371]: INFO nova.compute.manager [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 1.39 seconds to deallocate network for instance. [ 1500.981065] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.014696} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.981065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.982025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1500.982704] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef9aaecd-2366-45ab-868e-ef0ac7e2bc7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.993978] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1500.993978] env[63371]: value = "task-1774027" [ 1500.993978] env[63371]: _type = "Task" [ 1500.993978] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.004385] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.179182] env[63371]: INFO nova.compute.manager [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 1.43 seconds to deallocate network for instance. [ 1501.196278] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1501.205147] env[63371]: DEBUG nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.207016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcefffc-4952-45b3-99dc-220a53fa3fca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.418718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.506580] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489281} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.506949] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1501.507340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1501.507658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1af9c49-8f74-4a0b-bad7-199a26025842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.514197] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1501.514197] env[63371]: value = "task-1774028" [ 1501.514197] env[63371]: _type = "Task" [ 1501.514197] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.523138] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.691110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.703725] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 195de525-1081-4db6-acf3-04a6d3eb142f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1501.719354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.719646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.723960] env[63371]: INFO nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] instance snapshotting [ 1501.725227] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978334eb-7332-4a7f-b660-15fa4239f2f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.749682] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2465b541-d223-4c45-87c9-cd888f6a3b69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.027959] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091706} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.027959] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1502.028953] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095efcaa-3292-4f71-8aa1-6f744bb6a4ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.063547] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1502.064487] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d081a2cf-0930-4ab4-9af9-4c3ca7fe7b0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.090084] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1502.090084] env[63371]: value = "task-1774029" [ 1502.090084] env[63371]: _type = "Task" [ 1502.090084] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.100549] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.210802] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0e2c8ced-198f-43be-9d41-703a7c590df4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1502.224349] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1502.264064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1502.264403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7686bc94-d1f2-4171-8bed-4ea42872dee4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.274781] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1502.274781] env[63371]: value = "task-1774030" [ 1502.274781] env[63371]: _type = "Task" [ 1502.274781] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.286118] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.288546] env[63371]: DEBUG nova.compute.manager [req-86c3a208-0060-4516-b687-f09aa1c67c10 req-f09a94d1-40a4-4935-a449-8193cb21a617 service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-vif-deleted-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.606214] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.715776] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 574121c4-c721-4d30-81ec-3f2310a7b6d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1502.750980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.790668] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.102118] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task, 'duration_secs': 0.523414} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.102462] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Reconfigured VM instance instance-00000030 to attach disk [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1503.103219] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54cf3a39-0100-43c0-a428-bb8cf91edc58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.110178] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1503.110178] env[63371]: value = "task-1774031" [ 1503.110178] env[63371]: _type = "Task" [ 1503.110178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.125504] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774031, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.219422] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1503.288209] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task, 'duration_secs': 0.7318} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.288209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1503.288583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74827d2-79ae-48d2-a6f8-98b08e279ce2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.439016] env[63371]: DEBUG nova.compute.manager [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1503.630133] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774031, 'name': Rename_Task, 'duration_secs': 0.254779} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.630133] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1503.630133] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-610b85e7-275e-4efe-ac5e-b39c717b020f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.638810] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1503.638810] env[63371]: value = "task-1774032" [ 1503.638810] env[63371]: _type = "Task" [ 1503.638810] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.651034] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.723355] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 1c93487b-6d8f-424d-8b95-10bfb894c609 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1503.810945] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1503.811982] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-95bb3226-4b50-41ee-86b4-5c762acc5506 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.821400] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1503.821400] env[63371]: value = "task-1774033" [ 1503.821400] env[63371]: _type = "Task" [ 1503.821400] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.830323] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.967474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.151431] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance d6bc618e-33c9-4b45-b79f-afe6811acd4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1504.333859] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.655394] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task, 'duration_secs': 0.778069} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.655857] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1504.656192] env[63371]: INFO nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 16.59 seconds to spawn the instance on the hypervisor. [ 1504.656482] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1504.657545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fca5c4-e532-48da-8a9d-560bc91d1dc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.832246] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16510582-f232-4381-b811-b24fb7f91370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.838595] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.844832] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4dac19-cf5b-4eda-a8e6-b0daa5dac1ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.878282] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b8ba96-b63a-4f3e-ae80-7abc11307281 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.886120] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8615f2c3-b522-4550-846d-916bf17a19db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.901410] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.180831] env[63371]: INFO nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 46.24 seconds to build instance. [ 1505.335529] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.404684] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1505.683235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.806s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.838888] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.914644] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1505.914994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.784s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.915259] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.367s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.916826] env[63371]: INFO nova.compute.claims [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1506.017979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.018228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.018386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.018602] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.018833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock 
"f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.021505] env[63371]: INFO nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Terminating instance [ 1506.023420] env[63371]: DEBUG nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1506.023568] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1506.024409] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614f0d94-8db6-4521-a1ef-c4673388b84f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.032266] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1506.032501] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11d48e4e-c6f6-4426-89ce-9e41d68385ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.043251] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1506.043251] env[63371]: value = "task-1774037" [ 1506.043251] env[63371]: _type = "Task" [ 1506.043251] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.050267] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.339421] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task, 'duration_secs': 2.379382} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.339736] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created linked-clone VM from snapshot [ 1506.340516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e30946-6674-4042-92f2-65feff37720c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.348347] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploading image c308ba9c-513a-4658-a60c-4dcff19c7679 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1506.370790] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1506.370790] env[63371]: value = "vm-368339" [ 1506.370790] env[63371]: _type = "VirtualMachine" [ 1506.370790] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1506.371191] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-472d9192-83f0-4f28-ab14-5f0eac138da6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.378732] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease: (returnval){ [ 1506.378732] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.378732] env[63371]: _type = "HttpNfcLease" [ 1506.378732] env[63371]: } obtained for exporting VM: (result){ [ 1506.378732] env[63371]: value = "vm-368339" [ 1506.378732] env[63371]: _type = "VirtualMachine" [ 1506.378732] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1506.378987] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the lease: (returnval){ [ 1506.378987] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.378987] env[63371]: _type = "HttpNfcLease" [ 1506.378987] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1506.385469] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1506.385469] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.385469] env[63371]: _type = "HttpNfcLease" [ 1506.385469] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1506.552429] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774037, 'name': PowerOffVM_Task, 'duration_secs': 0.263944} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.552721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1506.552920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1506.553245] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbf0959a-5c32-4118-a6d9-cda2708ebc4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.729258] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1506.729514] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1506.729806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleting the datastore file [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.730311] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e799907-0961-4780-9b21-6f8fec26a728 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.738285] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1506.738285] env[63371]: value = "task-1774040" [ 1506.738285] env[63371]: _type = "Task" [ 1506.738285] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.747059] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.887014] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1506.887014] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.887014] env[63371]: _type = "HttpNfcLease" [ 1506.887014] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1506.888543] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1506.888543] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.888543] env[63371]: _type = "HttpNfcLease" [ 1506.888543] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1506.888543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e73fb79-7873-4e5e-a224-5b5d34372325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.896982] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1506.897374] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1507.014720] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bcbf705a-70d3-4e51-b7c7-787e2a2d7e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.249980] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1507.253203] env[63371]: INFO nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1507.253203] env[63371]: DEBUG oslo.service.loopingcall [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.255830] env[63371]: DEBUG nova.compute.manager [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1507.255945] env[63371]: DEBUG nova.network.neutron [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1507.496813] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90461a01-9620-464f-a7f9-54e3752ca5d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.506077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41a98a3-9b29-459f-8af2-c0c61f84c171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.544298] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e41b24d-0ce2-4d84-ae3a-4e30e4ac8b31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.552363] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efe6e1-cc09-45b4-8693-ea611acf2a7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.569560] env[63371]: DEBUG nova.compute.provider_tree [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.625274] env[63371]: DEBUG nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.625694] env[63371]: INFO nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Neutron deleted interface 0fad7cfe-3cf5-4996-9d68-50d1919577c7; detaching it from the instance and deleting it from the info cache [ 1507.626095] env[63371]: DEBUG nova.network.neutron [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.074136] env[63371]: DEBUG nova.scheduler.client.report [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1508.130883] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf33477b-e01d-46a4-bfce-b78ece252fb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.143519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985d94b3-5996-457b-a880-4f646d898c8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.180767] env[63371]: DEBUG nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Detach interface failed, port_id=0fad7cfe-3cf5-4996-9d68-50d1919577c7, reason: Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1508.374708] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1508.375810] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52a08e7-70a8-4a15-8723-6aa74bee3d21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.385589] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1508.387655] env[63371]: ERROR oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk due to incomplete transfer. 
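(Note: the "Waiting for the task: ... to complete" and "Task: {...} progress is N%" entries above come from oslo.vmware's wait_for_task/_poll_task helpers, which repeatedly read the task state until it succeeds or errors. The following is only a minimal, self-contained sketch of that polling pattern, not oslo.vmware's actual code; the poll_state callable, its return shape, and the TaskFailed exception are assumptions made up for the illustration.)

    import time

    class TaskFailed(Exception):
        """Illustrative stand-in for a task that ends in an error state."""

    def wait_for_task(poll_state, interval=0.5, timeout=300):
        # poll_state() is assumed to return a dict such as
        # {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_state()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Mirrors the "progress is N%" lines logged by _poll_task above.
            print(f"task progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"task did not complete within {timeout}s")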
[ 1508.387655] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-57a8c1fa-123c-40be-914c-86da8c5ce13c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.396806] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1508.396806] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Uploaded image 6287f359-692e-438d-8347-f0d2b27b0f80 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1508.399410] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1508.399762] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d6b462e6-8270-4d05-9bee-103a9f613852 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.406921] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1508.406921] env[63371]: value = "task-1774041" [ 1508.406921] env[63371]: _type = "Task" [ 1508.406921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.417395] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.582307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.583117] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1508.589845] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.810s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.589845] env[63371]: INFO nova.compute.claims [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.835152] env[63371]: DEBUG nova.network.neutron [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.918555] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.095963] env[63371]: DEBUG nova.compute.utils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1509.097589] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1509.098182] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1509.173672] env[63371]: DEBUG nova.policy [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1509.340609] env[63371]: INFO nova.compute.manager [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 2.08 seconds to deallocate network for instance. 
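(Note: the "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" entries above are emitted by oslo_concurrency.lockutils, which serializes sections such as the resource tracker's instance_claim and the per-instance terminate path. A minimal sketch of the same pattern is below, assuming only the public lockutils.synchronized decorator and lockutils.lock context manager; the claim/terminate functions and their arguments are invented for the example.)

    from oslo_concurrency import lockutils

    # Decorator form: calls for the same lock name are serialized, matching the
    # "compute_resources" acquire/release pairs seen in the log above.
    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        print(f"claiming resources for {instance_uuid}")

    # Context-manager form, e.g. a per-instance lock around a terminate path.
    def terminate(instance_uuid):
        with lockutils.lock(instance_uuid):
            print(f"terminating {instance_uuid}")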
[ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.402103] env[63371]: INFO nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Terminating instance [ 1509.404381] env[63371]: DEBUG nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1509.404703] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.406036] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9504eaf-2ba3-4fd6-b288-827fe74bc072 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.419724] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1509.424204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a653658-7066-4c00-8d01-b5c81ba710f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.426458] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task, 'duration_secs': 0.643534} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.427271] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroyed the VM [ 1509.427716] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1509.428498] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-687c5f4f-9988-4544-aadb-bf2d9600b9f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.437123] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1509.437123] env[63371]: value = "task-1774044" [ 1509.437123] env[63371]: _type = "Task" [ 1509.437123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.445377] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774044, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1509.561314] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0193c44d-2d9a-4f7b-82d1-eeff6b71cf01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.572394] env[63371]: DEBUG oslo_vmware.api [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1509.572394] env[63371]: value = "task-1774045" [ 1509.572394] env[63371]: _type = "Task" [ 1509.572394] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.580676] env[63371]: DEBUG oslo_vmware.api [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.606962] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1509.686377] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Successfully created port: 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.699568] env[63371]: DEBUG nova.compute.manager [req-c59fa24b-c66c-4ff3-ae85-f3dd667b400c req-d66757b3-747a-4a3b-8c31-12977b0032f5 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.699568] env[63371]: DEBUG nova.compute.manager [req-c59fa24b-c66c-4ff3-ae85-f3dd667b400c req-d66757b3-747a-4a3b-8c31-12977b0032f5 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.852347] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.953751] env[63371]: DEBUG nova.compute.utils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Cleaning up image 6287f359-692e-438d-8347-f0d2b27b0f80 {{(pid=63371) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 1510.083674] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Traceback (most recent call last): [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] ds_util.file_delete(self._session, [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] session._wait_for_task(file_delete_task) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return self.wait_for_task(task_ref) [ 
1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return evt.wait() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] result = hub.switch() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return self.greenlet.switch() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] self.f(*self.args, **self.kw) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] raise exceptions.translate_fault(task_info.error) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] [ 1510.084417] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.084607] env[63371]: INFO nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 0.68 seconds to destroy the instance on the hypervisor. [ 1510.084810] env[63371]: DEBUG oslo.service.loopingcall [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.087712] env[63371]: DEBUG nova.compute.manager [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1510.088199] env[63371]: DEBUG nova.network.neutron [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.161512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.161778] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.163433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb01281-84ed-4d40-b60c-914566edefd9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.172875] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22661a4-a50c-4b94-8762-b56f5e1f8671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.205580] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53369d3d-2711-4b91-9183-aa37784b8df8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.215719] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3e43bb-a94a-497c-9a06-530f52219262 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.232127] env[63371]: DEBUG nova.compute.provider_tree [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.619569] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1510.656147] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1510.656484] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1510.656566] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1510.656712] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1510.656910] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1510.657010] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1510.657237] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1510.657424] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1510.657559] env[63371]: DEBUG nova.virt.hardware [None 
req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1510.657728] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1510.658113] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1510.658791] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7882b04-39ba-4804-9882-4d65f1e49bbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.666793] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1510.670585] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0362d10b-e4e3-40a3-8d55-f53a69faba5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.735569] env[63371]: DEBUG nova.scheduler.client.report [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1510.907472] env[63371]: DEBUG nova.network.neutron [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.195013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.240935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.241941] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1511.244966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.087s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.245249] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.248716] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.620s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.249606] env[63371]: INFO nova.compute.claims [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1511.276579] env[63371]: INFO nova.scheduler.client.report [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleted allocations for instance 50d5eac1-0752-4089-948c-b04439df6f6c [ 1511.414384] env[63371]: INFO nova.compute.manager [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 1.32 seconds to deallocate network for instance. 
[ 1511.484486] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.514932] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Successfully updated port: 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1511.734570] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-vif-deleted-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.734965] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.735154] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.735427] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.735665] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.735892] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] No waiting events found dispatching network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1511.736138] env[63371]: WARNING nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received unexpected event network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc for instance with vm_state building and task_state spawning. 
[ 1511.736371] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.736583] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing instance network info cache due to event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1511.736845] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.737081] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.737296] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.755869] env[63371]: DEBUG nova.compute.utils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1511.759041] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1511.759041] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1511.783359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.392s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.863141] env[63371]: DEBUG nova.policy [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1511.920090] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.021889] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.266425] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1512.297458] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.470727] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.511876] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Successfully created port: a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.920519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b5c3f1-20e4-44d3-ade1-74e4e2d4e105 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.929913] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1c9d6-b030-4de0-8ac0-e3830fea01f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.975103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d372f768-3759-4d59-90d7-146fb6014f5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.979399] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.979890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.980077] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.987501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dca31b-b230-49d0-b73c-01983b34c1c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.007068] env[63371]: DEBUG nova.compute.provider_tree [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.293874] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1513.329280] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1513.329951] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1513.329951] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.330108] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1513.330314] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.330509] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1513.330993] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1513.330993] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1513.331212] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1513.331425] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1513.331653] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1513.333370] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57e5eef-8bac-413c-ae3a-8c87c7d9bc76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.345797] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1a1405-7401-4160-bdac-af25c81c5fe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.512117] env[63371]: DEBUG nova.scheduler.client.report [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1513.548480] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.790980] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.020168] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.020168] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1514.022615] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.259s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.022815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.025083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.503s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.025314] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.027121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.069s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.028785] env[63371]: INFO nova.compute.claims [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1514.065323] env[63371]: INFO nova.scheduler.client.report [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance 64fc862c-a755-4cac-997b-7a8328638269 [ 1514.071135] env[63371]: INFO nova.scheduler.client.report [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleted allocations for instance be37eb1c-8582-4446-afd6-ae11a8cadf95 [ 1514.296153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.296512] env[63371]: DEBUG nova.compute.manager [None 
req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance network_info: |[{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1514.296938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:c4:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01b878e5-651e-49f1-959f-7da17291c0bc', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.309225] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating folder: Project (f98ab0107f5040139ef8be7c3ae22207). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.309225] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76b58762-966d-437f-9b30-72eccbf65d23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.320597] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created folder: Project (f98ab0107f5040139ef8be7c3ae22207) in parent group-v368199. [ 1514.320839] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating folder: Instances. Parent ref: group-v368343. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.321135] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-359981e7-4a4f-4b47-b833-552e52ca4c5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.330734] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created folder: Instances in parent group-v368343. [ 1514.331176] env[63371]: DEBUG oslo.service.loopingcall [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.331350] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.331585] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8eb75b03-0d9e-400d-b135-5a00a8031336 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.354905] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.354905] env[63371]: value = "task-1774050" [ 1514.354905] env[63371]: _type = "Task" [ 1514.354905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.366316] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774050, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.537454] env[63371]: DEBUG nova.compute.utils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1514.539203] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1514.539549] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1514.577235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.115s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.584020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.874s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.672031] env[63371]: DEBUG nova.policy [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b22696705ee840cb8ecd18e5abcec19c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5c5bf80b8e64c8795da4d79d6a89150', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1514.679950] env[63371]: DEBUG nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1514.680226] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.680376] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.680565] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.680752] env[63371]: DEBUG nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] No waiting events found dispatching network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1514.680933] env[63371]: WARNING nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received unexpected event network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 for instance with vm_state building and task_state spawning. [ 1514.871740] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774050, 'name': CreateVM_Task, 'duration_secs': 0.428887} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.872376] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.873037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.873222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.874490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.874490] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2905d5ce-99f1-4dda-9ede-e5c472cf7572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.880612] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1514.880612] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8" [ 1514.880612] env[63371]: _type = "Task" [ 1514.880612] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.889922] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.902140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.902387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.045340] env[63371]: DEBUG nova.compute.utils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1515.290430] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Successfully updated port: a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.401237] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011534} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.401237] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.401592] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.401592] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6748be3-6683-415a-afb7-99a4737c8ce9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.412677] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.412677] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.412677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-783695b5-7fb2-4f9c-8d96-69202bf8ef06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.418525] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1515.418525] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659" [ 1515.418525] env[63371]: _type = "Task" [ 1515.418525] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.431294] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.552652] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1515.583556] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8e2a7b-6811-4799-ad51-0a170bdd95c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.594082] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46833eda-f99f-4e04-89f9-31dc17a92374 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.639088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cd6e38-4964-4d1c-9870-ab5fb7b3a91b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.656290] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca32e50a-c5b1-4fac-962b-0bcf41887f0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.677137] env[63371]: DEBUG nova.compute.provider_tree [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.729448] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Successfully created port: 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1515.793805] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.793805] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.794296] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.934633] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659, 'name': SearchDatastore_Task, 'duration_secs': 0.011747} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.935683] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7691755-c290-4430-985b-c6f808fad47f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.945553] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1515.945553] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01" [ 1515.945553] env[63371]: _type = "Task" [ 1515.945553] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.955946] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.182598] env[63371]: DEBUG nova.scheduler.client.report [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1516.355455] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.431222] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1516.431222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df29b4bc-95cb-43d4-b188-1ecebe854b3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.441451] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1516.442293] env[63371]: ERROR oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk due to incomplete transfer. [ 1516.442293] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1621101c-cf38-4adb-bcff-f644855fbae5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.453855] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01, 'name': SearchDatastore_Task, 'duration_secs': 0.017132} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.456811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.456811] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1516.456811] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3593896-8ff1-4e45-9df2-2c316f2d02b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.462469] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1516.462469] env[63371]: value = "task-1774052" [ 1516.462469] env[63371]: _type = "Task" [ 1516.462469] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.465716] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1516.466214] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploaded image c308ba9c-513a-4658-a60c-4dcff19c7679 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1516.467915] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1516.468626] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-de48b43b-8227-4f09-8a9d-1be0e81c191b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.477557] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.482919] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1516.482919] env[63371]: value = "task-1774053" [ 1516.482919] env[63371]: _type = "Task" [ 1516.482919] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.494251] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.571019] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1516.571019] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.597959] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:31:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='431901221',id=18,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-933523965',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1516.598317] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1516.598530] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1516.598772] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1516.598997] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1516.599248] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1516.599566] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1516.599726] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1516.599930] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1516.600174] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Possible 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1516.600395] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1516.601840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c146b5b-2a8c-4c06-b903-bb973c41c08e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.610958] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0419e7e5-5b4f-416d-bbc1-61ec5dbd47b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.702737] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.702737] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1516.706625] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.356s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.706840] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.712951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.171s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.712951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.715498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.923s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.715498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.717706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.487s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.717706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.719720] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.080s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.721856] env[63371]: INFO nova.compute.claims [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.769570] env[63371]: INFO nova.scheduler.client.report [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance dcf8063b-56eb-439c-bee5-139a1e157714 [ 1516.773879] env[63371]: INFO nova.scheduler.client.report [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted allocations for instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 [ 1516.840989] env[63371]: INFO nova.scheduler.client.report [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted allocations for instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 [ 1516.925640] env[63371]: INFO nova.scheduler.client.report [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted allocations for instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 [ 1516.974892] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.003624] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.076337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.076745] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance network_info: |[{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1517.077228] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:c6:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7788c55-6aa0-4056-b8d1-cff8ad8951f7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.086583] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating folder: Project (2601d597b4d64481ace490d56d1056a6). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.087291] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db58ab48-52ad-4258-b3ca-deee2c00bc5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.098960] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created folder: Project (2601d597b4d64481ace490d56d1056a6) in parent group-v368199. [ 1517.099111] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating folder: Instances. Parent ref: group-v368347. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.099571] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-146ed245-fe96-4c10-91f6-1d4280dc1f1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.103575] env[63371]: DEBUG nova.compute.manager [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.103780] env[63371]: DEBUG nova.compute.manager [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing instance network info cache due to event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1517.103981] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.104330] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.104399] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.115123] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created folder: Instances in parent group-v368347. [ 1517.115378] env[63371]: DEBUG oslo.service.loopingcall [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.115569] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.115784] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-887f0fcd-f779-4409-9d13-5cba55ee9cf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.138022] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.138022] env[63371]: value = "task-1774056" [ 1517.138022] env[63371]: _type = "Task" [ 1517.138022] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.151429] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.229335] env[63371]: DEBUG nova.compute.utils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1517.233784] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1517.235964] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1517.293412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.595s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.294695] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.544s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.353172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.575s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.372625] env[63371]: DEBUG nova.policy [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acdd9325a8b0496aad20c5dbd1c37ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fc50868ddcf4193beb9b3a8a37f97b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1517.435271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.369s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.472538] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57651} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.472840] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.473023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.473586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26b97ad5-8625-463f-bfb4-50e3fba5f549 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.480176] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1517.480176] env[63371]: value = "task-1774058" [ 1517.480176] env[63371]: _type = "Task" [ 1517.480176] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.490949] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.496414] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task, 'duration_secs': 0.586963} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.496568] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroyed the VM [ 1517.496800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1517.497065] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-378dcbcb-b406-48eb-bfb3-6cb77555c9b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.507246] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1517.507246] env[63371]: value = "task-1774059" [ 1517.507246] env[63371]: _type = "Task" [ 1517.507246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.520144] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.652056] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.677203] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Successfully updated port: 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1517.738883] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1517.824514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.824796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.825010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.825206] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.825378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.831122] env[63371]: INFO nova.compute.manager [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Terminating instance [ 1517.832156] env[63371]: DEBUG nova.compute.manager 
[None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1517.832480] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.833305] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ffed3f-8214-4933-901d-dedba76c5adf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.841046] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.843435] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82ef5d6c-292a-4da3-896c-d616f28fa199 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.852410] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1517.852410] env[63371]: value = "task-1774060" [ 1517.852410] env[63371]: _type = "Task" [ 1517.852410] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.865100] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.992147] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090352} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.992147] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.993107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c8be1d-81e4-46f4-a459-61ab53979611 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.017729] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1518.025159] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d229367-84da-421c-880e-5083cd8c94a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.050747] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.051067] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1518.051067] env[63371]: value = "task-1774061" [ 1518.051067] env[63371]: _type = "Task" [ 1518.051067] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.057397] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Successfully created port: d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.067326] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.147632] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task, 'duration_secs': 0.601064} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.150243] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.151256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.151911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.151911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.152230] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d41a33-31fb-4473-82e6-bcb843fa8fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.157019] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1518.157019] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605" [ 1518.157019] env[63371]: _type = "Task" [ 1518.157019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.169268] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.179917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.180039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.180195] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.260788] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated VIF entry in instance network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.261203] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.352699] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2adc22-c58f-450a-9d24-8756e102a018 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.367061] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bb6977-eb38-4c67-a01a-ef18bc47ebee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.371960] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774060, 'name': PowerOffVM_Task, 'duration_secs': 0.199667} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.371960] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.371960] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1518.373780] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85eec6ad-ea9e-41a3-835c-c18958936c89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.402522] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98c15d7-0938-4340-b478-3450e72bdf66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.410399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702a0e49-9281-4c37-84c0-b2bd36dbf554 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.427803] env[63371]: DEBUG nova.compute.provider_tree [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.498346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1518.498566] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1518.498818] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleting the datastore file [datastore1] 
76c861a7-30f2-40f4-b723-7912975f36f8 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1518.499105] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7e292f5-e078-491f-803c-c9398f1afaf8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.506194] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1518.506194] env[63371]: value = "task-1774063" [ 1518.506194] env[63371]: _type = "Task" [ 1518.506194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.517581] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.520484] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task, 'duration_secs': 0.632125} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.520745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1518.520980] env[63371]: INFO nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 16.80 seconds to snapshot the instance on the hypervisor. [ 1518.562317] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774061, 'name': ReconfigVM_Task, 'duration_secs': 0.302614} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.562317] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.562317] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-613cc7bc-2b4f-4ec5-b383-a9c4fa0681f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.573316] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1518.573316] env[63371]: value = "task-1774064" [ 1518.573316] env[63371]: _type = "Task" [ 1518.573316] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.583446] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.672251] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605, 'name': SearchDatastore_Task, 'duration_secs': 0.010766} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.672568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.672806] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.673053] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.673186] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.673387] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.673656] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f0586cd-3397-4768-9e64-077b703688d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.685470] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.685651] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.686506] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edbed72f-0bf9-4d01-bc5d-8fd551f537c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.695663] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1518.695663] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2" [ 1518.695663] env[63371]: _type = "Task" [ 1518.695663] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.704678] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.754793] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1518.764751] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.789644] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1518.789894] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1518.790227] env[63371]: DEBUG nova.virt.hardware [None 
req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.790227] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1518.790472] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.790513] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1518.791234] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1518.791336] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1518.792249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8daf980-a102-48ad-8c78-aa1945679746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.796203] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 
195de525-1081-4db6-acf3-04a6d3eb142f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1518.802491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3678dabe-8942-4a1f-8288-ab965e5b8e03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.931016] env[63371]: DEBUG nova.scheduler.client.report [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.017023] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144332} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.017023] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.017023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.017208] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.017375] env[63371]: INFO nova.compute.manager [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1519.017613] env[63371]: DEBUG oslo.service.loopingcall [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.018051] env[63371]: DEBUG nova.compute.manager [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1519.018051] env[63371]: DEBUG nova.network.neutron [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.084709] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.205607] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.206421] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ec3e72-2e1b-4f5c-b6af-78633825f772 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.212570] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1519.212570] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f" [ 1519.212570] env[63371]: _type = "Task" [ 1519.212570] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.221019] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.435611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.436173] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1519.439617] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.286s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.441162] env[63371]: INFO nova.compute.claims [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.471057] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.588993] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.724069] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.036504} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.724352] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.724614] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.724991] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0d0ad59-84b9-4360-8924-4a3d15be9337 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.731586] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1519.731586] env[63371]: value = "task-1774066" [ 1519.731586] env[63371]: _type = "Task" [ 1519.731586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.741164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774066, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.860828] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.861281] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.861513] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.861655] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.861852] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] No waiting events found dispatching network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.862216] env[63371]: WARNING nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received unexpected event network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 for instance with vm_state building and task_state spawning. [ 1519.862457] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.864132] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing instance network info cache due to event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1519.864132] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.956160] env[63371]: DEBUG nova.compute.utils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.961189] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1519.961370] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1519.974411] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.974788] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance network_info: |[{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1519.975164] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f 
req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.975384] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1519.979273] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:87:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e9291f7-154c-4bfa-bfd8-f09dbd9b4963', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.986945] env[63371]: DEBUG oslo.service.loopingcall [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.989401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1519.989943] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67930fd7-7b24-4931-aefe-b0c7434f2986 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.013836] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.013836] env[63371]: value = "task-1774067" [ 1520.013836] env[63371]: _type = "Task" [ 1520.013836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.024220] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.088875] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task, 'duration_secs': 1.162097} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.088875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.088875] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd935257-d079-47a3-922c-3b5944ad1090 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.099226] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1520.099226] env[63371]: value = "task-1774068" [ 1520.099226] env[63371]: _type = "Task" [ 1520.099226] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.109013] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.160065] env[63371]: DEBUG nova.policy [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1520.242490] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504689} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.242764] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.242979] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.243257] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b727f5da-8fdc-4869-8af2-c8117100d9db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.249785] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1520.249785] env[63371]: value = "task-1774069" [ 1520.249785] env[63371]: _type = "Task" [ 1520.249785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.259051] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.460588] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1520.534503] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.614409] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774068, 'name': PowerOnVM_Task, 'duration_secs': 0.495526} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.614409] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1520.614773] env[63371]: INFO nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1520.614773] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1520.615621] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3e3a66-00a1-426f-a3c3-f60d52d794db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.673702] env[63371]: DEBUG nova.network.neutron [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.766884] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079304} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.766884] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1520.766884] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92e6e67-3215-4284-85d8-065fcf48d712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.814871] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1520.819943] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c548ee09-bb06-4957-853e-60faff2abed3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.844389] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1520.844389] env[63371]: value = "task-1774070" [ 1520.844389] env[63371]: _type = "Task" [ 1520.844389] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.854392] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.877714] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updated VIF entry in instance network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.877714] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.954647] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Successfully updated port: d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1521.028952] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.063770] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Successfully created port: 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1521.107320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c739f742-7ea6-4c9e-a673-1471c1d2c662 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.115872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5b84e-9187-4215-af99-c2a57116618f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.159762] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cf9a10-2a27-49e1-9b81-2aeb4fbb27d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.164931] env[63371]: INFO nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 51.64 seconds to build instance. [ 1521.171651] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28beca9-b564-4f97-8d41-0e6450c9776e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.176148] env[63371]: INFO nova.compute.manager [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 2.16 seconds to deallocate network for instance. [ 1521.193134] env[63371]: DEBUG nova.compute.provider_tree [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.359963] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.387754] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.397080] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.397503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.397806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.398093] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.398342] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.401421] env[63371]: INFO nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Terminating instance [ 1521.405029] env[63371]: DEBUG nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1521.405029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1521.406196] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2075d5db-e673-42e5-a96c-a3c75b512bd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.416969] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1521.416969] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d6b0a57-6030-4f8c-ad85-7feb2ce6f276 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.425072] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1521.425072] env[63371]: value = "task-1774071" [ 1521.425072] env[63371]: _type = "Task" [ 1521.425072] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.442570] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774071, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.460085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.460085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.460085] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1521.480919] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1521.508663] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1521.508938] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 
{{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1521.509590] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1521.509779] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1521.510119] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1521.510119] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1521.510763] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1521.510763] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1521.513392] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b384bb00-31e5-4386-82f2-ec6443952f7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.526335] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d516d38-bef8-4bac-a063-6cddd0d909c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.536080] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task, 'duration_secs': 1.420332} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.544475] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.547133] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67ce76b8-3568-49d7-856c-6e01bcc20a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.552552] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1521.552552] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4" [ 1521.552552] env[63371]: _type = "Task" [ 1521.552552] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.563369] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.667488] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.337s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.683212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.697926] env[63371]: DEBUG nova.scheduler.client.report [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.858460] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task, 'duration_secs': 0.852407} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.858460] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.858460] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08c733a4-aa6b-49b3-8dc0-2e564a4ac962 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.865137] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1521.865137] env[63371]: value = "task-1774073" [ 1521.865137] env[63371]: _type = "Task" [ 1521.865137] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.874567] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774073, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.935670] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774071, 'name': PowerOffVM_Task, 'duration_secs': 0.310625} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.936541] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.936541] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1521.936541] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eeec4e6-7f85-48a6-89c3-d2e4e80f02b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.041018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1522.041136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1522.041321] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleting the datastore file [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.041588] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f78de94-7df4-4253-afec-80429038fcc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.049675] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1522.049675] env[63371]: value = "task-1774075" [ 1522.049675] 
env[63371]: _type = "Task" [ 1522.049675] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.068502] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.072883] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4, 'name': SearchDatastore_Task, 'duration_secs': 0.012423} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.072883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.072883] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1522.073081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.073081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.073253] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.073520] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df54ca74-0446-42e3-b32a-e26df546e42c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.082584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b 
tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.082801] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1522.083624] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22bc23ce-15dd-4bce-adb1-6c350866e24a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.089320] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1522.089320] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4" [ 1522.089320] env[63371]: _type = "Task" [ 1522.089320] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.097030] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.173464] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1522.203178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.203705] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1522.206957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.543s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.207159] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.209318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.143s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.211687] env[63371]: INFO nova.compute.claims [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.251345] env[63371]: INFO nova.scheduler.client.report [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted allocations for instance 7e66011a-4fed-471f-82ea-e1016f92ad39 [ 1522.252976] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1522.376921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.376921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.382164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774073, 'name': Rename_Task, 'duration_secs': 0.163363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.384750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.384750] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec088847-1225-4703-a9c4-6259811ec981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.390953] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1522.390953] env[63371]: value = "task-1774076" [ 1522.390953] env[63371]: _type = "Task" [ 1522.390953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.400164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774076, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.560018] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142375} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.560412] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.560942] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1522.561341] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1522.561514] env[63371]: INFO nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1522.562026] env[63371]: DEBUG oslo.service.loopingcall [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1522.562354] env[63371]: DEBUG nova.compute.manager [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1522.562525] env[63371]: DEBUG nova.network.neutron [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1522.604856] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4, 'name': SearchDatastore_Task, 'duration_secs': 0.00862} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.606456] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2010ad8c-aec0-46af-8e79-6ec302e45b04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.611913] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1522.611913] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528" [ 1522.611913] env[63371]: _type = "Task" [ 1522.611913] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.622388] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.683195] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.702307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.716509] env[63371]: DEBUG nova.compute.utils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 
tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.720053] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1522.720209] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1522.764315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.153s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.765874] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 31.501s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.766185] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.766970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.766970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.772646] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 
7e66011a-4fed-471f-82ea-e1016f92ad39] Terminating instance [ 1522.776781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.776781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.776781] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1522.813921] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-vif-deleted-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] No waiting events found dispatching network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1522.819177] env[63371]: WARNING nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received unexpected event network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e for instance with vm_state building and task_state spawning. [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-changed-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Refreshing instance network info cache due to event network-changed-d11a5154-6b30-4190-925a-4a07bc31709e. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquiring lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.855084] env[63371]: DEBUG nova.policy [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1522.907591] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774076, 'name': PowerOnVM_Task, 'duration_secs': 0.489575} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.907937] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1522.908199] env[63371]: INFO nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 9.62 seconds to spawn the instance on the hypervisor. 
[ 1522.908416] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1522.909249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1371d54e-9e43-4c3d-9d0e-3431bb1dce9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.123975] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528, 'name': SearchDatastore_Task, 'duration_secs': 0.010501} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.124335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.124541] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1523.125042] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b378abff-2ae8-472c-b7dd-5d84ff83580c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.134148] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1523.134148] env[63371]: value = "task-1774077" [ 1523.134148] env[63371]: _type = "Task" [ 1523.134148] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.143470] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.189848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.189848] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance network_info: |[{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1523.191075] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquired lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.191155] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Refreshing network info cache for port d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1523.192737] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:c6:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '030ecc21-dc1c-4283-854e-88e623b3970a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd11a5154-6b30-4190-925a-4a07bc31709e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1523.202889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 
tempest-ServerAddressesTestJSON-1509524884-project-member] Creating folder: Project (2fc50868ddcf4193beb9b3a8a37f97b4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.206123] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d86c0b5-bafb-43b3-a6b5-ad9f992528fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.219767] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created folder: Project (2fc50868ddcf4193beb9b3a8a37f97b4) in parent group-v368199. [ 1523.219767] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Creating folder: Instances. Parent ref: group-v368351. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.219767] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-036668bb-4a82-499d-8186-ff273da7c16f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.228770] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1523.233585] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created folder: Instances in parent group-v368351. [ 1523.233841] env[63371]: DEBUG oslo.service.loopingcall [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1523.234324] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1523.234570] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a98780fe-41e5-4240-ba30-87a74556503c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.256490] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1523.256490] env[63371]: value = "task-1774080" [ 1523.256490] env[63371]: _type = "Task" [ 1523.256490] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.269078] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.283200] env[63371]: DEBUG nova.compute.utils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Can not refresh info_cache because instance was not found {{(pid=63371) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1523.313966] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1523.342549] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Successfully updated port: 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1523.438187] env[63371]: INFO nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 52.69 seconds to build instance. [ 1523.456397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.456704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.465534] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.644415] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.774152] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.778524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49f157a-8a88-402e-be13-ed2f40129b28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.786265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9eb6ed-c9ee-4fda-8af2-1e5683d87087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.822794] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cba8757-cdcf-4cc0-ae9b-006d480f02df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.826352] env[63371]: DEBUG nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.826615] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.826867] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.827339] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.827339] env[63371]: DEBUG nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] No waiting events found dispatching network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1523.827561] env[63371]: WARNING nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received unexpected event network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c for instance with vm_state building and task_state spawning. 
[ 1523.835828] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fd9f87-a56e-4d40-adb6-e66d9e5d8884 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.851062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.851062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.851319] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1523.853747] env[63371]: DEBUG nova.compute.provider_tree [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.942193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.642s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.971441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.971891] env[63371]: DEBUG nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1523.972105] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.972389] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f8d1a6d-4e97-4f60-9a6c-0734f7719d3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.982707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5facb5c-30ad-48ad-89cf-6dc68d54dddf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.001371] env[63371]: DEBUG nova.network.neutron [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.018528] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 could not be found. [ 1524.018528] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.018746] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1524.019020] env[63371]: DEBUG oslo.service.loopingcall [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.019992] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1524.020113] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.036712] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updated VIF entry in instance network info cache for port d11a5154-6b30-4190-925a-4a07bc31709e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.037092] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.048990] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.073037] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Successfully created port: cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1524.145586] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525468} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.146066] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1524.146408] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1524.146739] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a865f51e-d0a6-402b-bd7f-9690b37f63fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.160035] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1524.160035] env[63371]: value = "task-1774081" [ 1524.160035] env[63371]: _type = "Task" [ 1524.160035] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.167419] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.241147] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1524.272499] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.272641] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.272860] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.272971] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.273378] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.273617] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.275048] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.276090] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.276274] 
env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.276503] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.276599] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.278115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aefdbf-427e-40e6-8d1f-63fe738fec94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.287049] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task, 'duration_secs': 0.624759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.287748] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1524.288394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.289910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.289910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1524.289910] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb7e2ee-0ac1-4350-b814-8230dd40142d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.295911] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c6da28-53ff-4df4-8a68-35228eb15875 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1524.301111] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.301111] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524fd62d-cf4c-a7cb-1c02-2c3ab4f60194" [ 1524.301111] env[63371]: _type = "Task" [ 1524.301111] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.317536] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524fd62d-cf4c-a7cb-1c02-2c3ab4f60194, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.317839] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.318082] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1524.318568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.318720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.318933] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1524.319154] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b04a66f2-9484-4f42-9b56-8d895d25b7cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.327913] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1524.328136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1524.328877] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0da821c-f0cd-4222-ab73-9008f7db5921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.334511] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.334511] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683" [ 1524.334511] env[63371]: _type = "Task" [ 1524.334511] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.342602] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.358047] env[63371]: DEBUG nova.scheduler.client.report [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.445091] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1524.454292] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.506514] env[63371]: INFO nova.compute.manager [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 1.94 seconds to deallocate network for instance. 
[ 1524.539702] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Releasing lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.554712] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.675811] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.676731] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1524.677774] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd48f122-32cf-4c57-801c-5129a9602f32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.708564] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.712532] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30818503-ddbd-4335-9c71-063eb1eed572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.731539] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1524.731539] env[63371]: value = "task-1774082" [ 1524.731539] env[63371]: _type = "Task" [ 1524.731539] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.742308] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.846548] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.847395] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3748db-5ffa-4579-a29b-e46e97d3bc00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.853322] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.853322] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05" [ 1524.853322] env[63371]: _type = "Task" [ 1524.853322] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.863534] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.864099] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1524.866772] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.866968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.615s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.868468] env[63371]: INFO nova.compute.claims [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.907358] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.973164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.013658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.057374] env[63371]: INFO nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.04 seconds to deallocate network for instance. 
[ 1525.243478] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.347783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "1cb18f2a-6476-4492-8576-7b0fd693a107" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.348105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.365244] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05, 'name': SearchDatastore_Task, 'duration_secs': 0.009582} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.365504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.365688] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1525.366162] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f95acbd-40bc-4618-a625-0f9e9a0a35a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.373310] env[63371]: DEBUG nova.compute.utils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.378475] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.379236] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.384472] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1525.384472] env[63371]: value = "task-1774083" [ 1525.384472] env[63371]: _type = "Task" [ 1525.384472] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.399937] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.410896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.411348] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance network_info: |[{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1525.411844] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:61:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12bfc72d-5ca7-4f11-8259-77887b5af47c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1525.422948] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating folder: Project (e96348bcfea1455dad72945c7c36f027). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.423660] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bd9d7d4-ab40-4ba4-ab0e-bca205863494 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.435054] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created folder: Project (e96348bcfea1455dad72945c7c36f027) in parent group-v368199. [ 1525.435054] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating folder: Instances. Parent ref: group-v368354. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.435274] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-763dd160-285b-4570-91f9-cb73bff4ae16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.444442] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created folder: Instances in parent group-v368354. [ 1525.444704] env[63371]: DEBUG oslo.service.loopingcall [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.444878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1525.445122] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4441799-e5d9-499a-81df-fe41fe585aa8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.469786] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1525.469786] env[63371]: value = "task-1774086" [ 1525.469786] env[63371]: _type = "Task" [ 1525.469786] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.478502] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774086, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.499360] env[63371]: DEBUG nova.policy [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.572832] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance disappeared during terminate [ 1525.572832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.807s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.743569] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task, 'duration_secs': 0.735556} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.743889] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.744105] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63371) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1525.744781] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-7714a2b3-c16d-40a8-b036-5afff60ce867 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.751597] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1525.751597] env[63371]: value = "task-1774087" [ 1525.751597] env[63371]: _type = "Task" [ 1525.751597] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.760510] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774087, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.885606] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1525.908123] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.993612] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774086, 'name': CreateVM_Task, 'duration_secs': 0.488904} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.993612] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1525.993612] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717b7566-e076-4ca3-a11a-0d6da9fb71cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.005627] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.005627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481" [ 1526.005627] env[63371]: _type = "Task" [ 1526.005627] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.013286] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481, 'name': SearchDatastore_Task} progress is 0%. 
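The lock traffic in this stretch (acquire and release around "[datastore1] devstack-image-cache_base/...", the per-instance and compute_resources locks elsewhere) comes from oslo.concurrency's lockutils; the "Acquiring lock", "acquired ... waited" and "released ... held" lines are its DEBUG output. A minimal sketch of the two usual forms, with illustrative lock names rather than nova's own:

from oslo_concurrency import lockutils

# Context-manager form: serialises work on one cached image and, with debug
# logging enabled, emits acquire/release DEBUG lines like those above.
def update_cached_image(image_id):
    with lockutils.lock('devstack-image-cache_base/%s' % image_id):
        pass  # fetch or refresh the cached VMDK here

# Decorator form: every caller contends on the same named semaphore.
@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    pass  # resource-tracker style critical section
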
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.036641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.036770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.053806] env[63371]: DEBUG nova.compute.manager [req-d3ce9dd6-c364-4797-8928-a2d7464ef97d req-1265df32-c65a-4048-818a-1c9b33fa6340 service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-vif-deleted-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1526.269559] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774087, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.084752} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.270224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63371) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1526.273674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce131ec2-d41f-43f3-a075-1cecc6fc134d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.301450] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/ephemeral_0.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.304257] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78935c75-fe97-4c63-857b-630481f6b279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.323703] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1526.323703] env[63371]: value = 
"task-1774088" [ 1526.323703] env[63371]: _type = "Task" [ 1526.323703] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.336578] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.414774] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678666} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.417425] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1526.417642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1526.419167] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cedc21d-2c09-47c1-966a-7bd1f2aebbf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.430430] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1526.430430] env[63371]: value = "task-1774089" [ 1526.430430] env[63371]: _type = "Task" [ 1526.430430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.441440] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774089, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.478460] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b304e862-a142-47c8-90a0-1c5b2d551c85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.487501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a75620-29a2-4eae-8f7e-7b6bfa4e84fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.526405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e29e32-ab77-41b2-afc7-591365c83702 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.532034] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-changed-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1526.532119] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Refreshing instance network info cache due to event network-changed-12bfc72d-5ca7-4f11-8259-77887b5af47c. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1526.532514] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquiring lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.532611] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquired lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.532846] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Refreshing network info cache for port 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1526.545864] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481, 'name': SearchDatastore_Task, 'duration_secs': 0.04131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.547934] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139f2796-98db-40c1-9de3-a398179f8c86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.553026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.553026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1526.553235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.553378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.553558] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.554301] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70a369d7-131f-4370-881d-8df994758e5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.568175] env[63371]: DEBUG nova.compute.provider_tree [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.570601] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.570801] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1526.572562] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef9b3e3f-d62f-4102-b871-1cae828de087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.577938] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.577938] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52be4a31-235b-6af1-54a4-243b6fd0e2f2" [ 1526.577938] env[63371]: _type = "Task" [ 1526.577938] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.582373] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Successfully created port: f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.591246] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be4a31-235b-6af1-54a4-243b6fd0e2f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.592303] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294a5607-2177-46ef-a95c-6d07cffe72aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.597501] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.597501] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355" [ 1526.597501] env[63371]: _type = "Task" [ 1526.597501] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.606987] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.690917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.691274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.691585] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.691736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.691958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.694413] env[63371]: INFO nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Terminating instance [ 1526.696469] env[63371]: DEBUG nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1526.696786] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.697600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857bf924-9580-4a8a-bfb8-bcc8007f9319 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.705559] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.705884] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ea44f56-0802-46fa-840c-274f7ab8b527 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.711848] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1526.711848] env[63371]: value = "task-1774090" [ 1526.711848] env[63371]: _type = "Task" [ 1526.711848] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.722095] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.834043] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.911354] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1526.942782] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074694} completed successfully. 
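The terminate path for instance e0369f27 follows the destroy order visible above: power the VM off, unregister it from the inventory, then delete its directory from the datastore, waiting on each vCenter task in turn. A condensed sketch of that sequence over an oslo.vmware session; vm_ref, dc_ref and ds_path are assumed to have been resolved earlier, and error handling (already-powered-off VMs, missing files) is omitted.

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    # Power off the VM and wait for the PowerOffVM_Task to finish.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # Remove the VM from the vCenter inventory (returns no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory, e.g. "[datastore1] e0369f27-...".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
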
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1526.946475] env[63371]: DEBUG nova.virt.hardware [None 
req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1526.946475] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1526.946983] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1526.946983] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1526.950107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c448b552-cd38-4f87-bd99-c72c982b6e10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.954149] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d9b06a-6648-4217-ad33-1b4a2e98d08b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.975847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03744f9-1f57-4fee-9e96-328f446e3cc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.989952] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.990705] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-969d29c9-2541-42fb-b41f-d37fdd229a62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.007476] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Successfully updated port: cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.019953] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: 
(returnval){ [ 1527.019953] env[63371]: value = "task-1774091" [ 1527.019953] env[63371]: _type = "Task" [ 1527.019953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.029972] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.073293] env[63371]: DEBUG nova.scheduler.client.report [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.109177] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355, 'name': SearchDatastore_Task, 'duration_secs': 0.009099} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.109467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.109791] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1527.110085] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4fc0625-b806-4df5-9a42-41246a77dc8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.118841] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1527.118841] env[63371]: value = "task-1774092" [ 1527.118841] env[63371]: _type = "Task" [ 1527.118841] env[63371]: } to complete. 
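A few records back, nova.virt.hardware walks from the unconstrained flavor and image limits (0:0:0) to the single candidate VirtCPUTopology(cores=1, sockets=1, threads=1) for the one-vCPU m1.nano flavor. The enumeration is essentially a search over factorisations of the vCPU count; a toy stand-alone version, not nova's code, looks like this:

from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) triple whose product equals vcpus and
    # which stays inside the per-dimension limits is a candidate topology.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(Topology(sockets, cores, threads))
    return found

print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
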
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.130772] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.228846] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774090, 'name': PowerOffVM_Task, 'duration_secs': 0.511639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.228846] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1527.228846] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1527.228846] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53c8ccd3-062b-4d4e-b34e-9444ed63cd3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleting the datastore file [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1527.308443] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c0ee9f3-e63e-49c2-ab2c-db5ef71152ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.323526] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] 
Waiting for the task: (returnval){ [ 1527.323526] env[63371]: value = "task-1774094" [ 1527.323526] env[63371]: _type = "Task" [ 1527.323526] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.337092] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.341265] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.513850] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.513850] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.513850] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1527.535950] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.580643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.580643] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1527.583972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.165s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.584274] env[63371]: DEBUG nova.objects.instance [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid fb2ddd3e-7adc-4a34-8797-0e98fdf19379 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.629627] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.842894] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445786} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.847049] env[63371]: INFO nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1527.847049] env[63371]: DEBUG oslo.service.loopingcall [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
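The "Waiting for function ... to return" entries (here for _deallocate_network_with_retries, earlier for vm_util.create_vm) are emitted by oslo.service's looping-call helpers, which nova uses to run a function repeatedly until it signals completion. A minimal FixedIntervalLoopingCall sketch follows; the polled check_done function is made up for illustration.

from oslo_service import loopingcall

attempts = {'n': 0}

def check_done():
    attempts['n'] += 1
    if attempts['n'] >= 3:
        # Stop the loop and hand this value back to the caller of wait().
        raise loopingcall.LoopingCallDone(retvalue='finished')

timer = loopingcall.FixedIntervalLoopingCall(check_done)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
print(result)  # 'finished'
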
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.847296] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task, 'duration_secs': 1.022792} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.847492] env[63371]: DEBUG nova.compute.manager [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1527.847633] env[63371]: DEBUG nova.network.neutron [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.849270] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/ephemeral_0.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1527.849970] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-186be719-b11c-4f6b-9b7a-86a0ff5c3c5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.857219] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1527.857219] env[63371]: value = "task-1774095" [ 1527.857219] env[63371]: _type = "Task" [ 1527.857219] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.869049] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774095, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.032360] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task, 'duration_secs': 0.707711} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.032578] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1528.033259] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4303afad-de5b-4d51-a8c8-41987e746637 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.040776] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1528.040776] env[63371]: value = "task-1774096" [ 1528.040776] env[63371]: _type = "Task" [ 1528.040776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.051024] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.062873] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.095264] env[63371]: DEBUG nova.compute.utils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.099623] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1528.099800] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.139155] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587414} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.139370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1528.139585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1528.139860] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1fe560a-5f53-4e9e-87fe-a819fe10b759 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.146371] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updated VIF entry in instance network info cache for port 12bfc72d-5ca7-4f11-8259-77887b5af47c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1528.146706] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.153564] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1528.153564] env[63371]: value = "task-1774097" [ 1528.153564] env[63371]: _type = "Task" [ 1528.153564] env[63371]: } to 
complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.171636] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.195917] env[63371]: DEBUG nova.policy [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c37f7c756994f8587c0ff8c0b2b6c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fe8537857034ada970b516fcf2fce57', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.371906] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774095, 'name': Rename_Task, 'duration_secs': 0.340535} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.376081] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1528.376408] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6eafe1cc-9977-4697-b0a5-39b5b74603d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.385295] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1528.385295] env[63371]: value = "task-1774098" [ 1528.385295] env[63371]: _type = "Task" [ 1528.385295] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.397058] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.556476] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.563843] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.607528] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1528.618484] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c56ffc-2ee8-452a-b37c-160b2f82fb02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.627825] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bd3aec-a7c9-4ba5-bb5c-1d65b667b0cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.668840] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Releasing lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.669470] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.669470] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing instance network info cache due to event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1528.669596] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.670241] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.670241] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1528.678029] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3ab504-df56-4134-96a1-d083c5cc2468 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.687855] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065576} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.688740] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1528.689661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9ac2d1-dad4-4977-87a9-5c40ee312402 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.693622] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dc4f7f-3976-4a12-a850-62daddd68ef7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.721416] env[63371]: DEBUG nova.compute.provider_tree [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.731770] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1528.733302] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7665183f-6eb1-4fbb-83cc-6c4cfa74817d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.748600] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.748805] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.749079] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.749210] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Lock 
"150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.749373] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] No waiting events found dispatching network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1528.749533] env[63371]: WARNING nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received unexpected event network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 for instance with vm_state building and task_state spawning. [ 1528.749689] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.749846] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing instance network info cache due to event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1528.750020] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.759075] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1528.759075] env[63371]: value = "task-1774099" [ 1528.759075] env[63371]: _type = "Task" [ 1528.759075] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.768481] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774099, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.888601] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.888866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.901540] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.914844] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Successfully created port: dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1529.053459] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task, 'duration_secs': 0.960468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.053762] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.054174] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef1ce9dd-f84c-4a69-b784-0ead5dc74e2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.063039] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1529.063039] env[63371]: value = "task-1774100" [ 1529.063039] env[63371]: _type = "Task" [ 1529.063039] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.071041] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.073415] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance network_info: |[{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1529.073706] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.073945] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.074129] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1529.075456] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:04:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf8050ea-381c-487b-9981-c3f042d673e1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.086070] env[63371]: DEBUG oslo.service.loopingcall [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.087141] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.087430] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ef85f9d-34ca-4446-859d-5f200b06f731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.102097] env[63371]: DEBUG nova.network.neutron [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.109461] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.109461] env[63371]: value = "task-1774101" [ 1529.109461] env[63371]: _type = "Task" [ 1529.109461] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.125187] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774101, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.219396] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Successfully updated port: f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1529.236704] env[63371]: DEBUG nova.scheduler.client.report [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.269761] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774099, 'name': ReconfigVM_Task, 'duration_secs': 0.492686} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.270067] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.271163] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c14d6ef-2447-4e27-b817-6ee3407a2bb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.277043] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1529.277043] env[63371]: value = "task-1774102" [ 1529.277043] env[63371]: _type = "Task" [ 1529.277043] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.285736] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774102, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.399109] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.537999] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updated VIF entry in instance network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.538438] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.577453] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.604888] env[63371]: INFO nova.compute.manager [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 1.76 seconds to deallocate network for instance. [ 1529.622042] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1529.624068] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774101, 'name': CreateVM_Task, 'duration_secs': 0.475626} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.626849] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1529.627777] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.627777] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.628072] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1529.628790] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c897c89-4475-4eb4-89a7-c43b1ca21edd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.634178] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1529.634178] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9" [ 1529.634178] env[63371]: _type = "Task" [ 1529.634178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.646209] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.651939] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.652255] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.652377] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.652626] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.652684] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.652793] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.653022] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.653179] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.653347] env[63371]: DEBUG 
nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.653516] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.653639] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.654509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c227898a-f21f-4440-ae82-e2d629f3cdfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.666031] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b58798-f3e8-4113-9de8-7281a1d64e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.722960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.723201] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.723349] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.742445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.745747] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.055s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.748243] 
env[63371]: DEBUG nova.objects.instance [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid 36b81143-211f-4c77-854b-abe0d3f39ce4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1529.776380] env[63371]: INFO nova.scheduler.client.report [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 [ 1529.790411] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774102, 'name': Rename_Task, 'duration_secs': 0.258587} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.790963] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.791481] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bb63233-8775-4a70-b384-9b4d3443b5d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.799943] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1529.799943] env[63371]: value = "task-1774103" [ 1529.799943] env[63371]: _type = "Task" [ 1529.799943] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.809022] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.897617] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task, 'duration_secs': 1.098409} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.898513] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updated VIF entry in instance network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.902024] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.902024] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1529.902024] env[63371]: INFO nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 13.33 seconds to spawn the instance on the hypervisor. 
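[editor's note] The recurring `Waiting for the task` / `progress is N%` / `completed successfully` triples in the entries above come from oslo.vmware's task polling: the driver submits a vCenter task (for example `PowerOnVM_Task` or `ExtendVirtualDisk_Task`) and then blocks while the task's progress is re-read on an interval until it reports success or error. The following is only a minimal illustrative sketch of that polling behaviour, not the driver's actual code; `read_task_info` is a hypothetical callable standing in for the real VIM property reads.

```python
import time

POLL_INTERVAL = 0.5  # seconds between progress checks (illustrative value)


def wait_for_task(task_ref, read_task_info):
    """Poll a task until it finishes, mirroring the 'progress is N%' lines.

    `read_task_info` is a hypothetical helper returning a dict such as
    {'state': 'running', 'progress': 66} or {'state': 'success'}.
    """
    while True:
        info = read_task_info(task_ref)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.get('error')}")
        # Corresponds to the periodic DEBUG entries emitted while polling.
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
```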
[ 1529.902024] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1529.902024] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aab6e9a-517d-42fb-8775-7604106ccc22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.041662] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.075454] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task, 'duration_secs': 0.597983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.075740] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.075943] env[63371]: INFO nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 11.32 seconds to spawn the instance on the hypervisor. [ 1530.076127] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.077048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0825b0-7b80-4f5b-9af4-64b3790042ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.115853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.145861] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.012487} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.146102] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.146345] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.146619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.146778] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.146956] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.147232] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-738cf14a-face-4d72-9355-56364c8ac299 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.156483] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.156675] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.157443] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31367f7-f75c-4e51-a45a-d269fd7cfa85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.167957] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1530.167957] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99" [ 1530.167957] env[63371]: _type = "Task" [ 1530.167957] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.178229] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.278482] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1530.287928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.920s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.315721] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.405702] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.422324] env[63371]: INFO nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 52.81 seconds to build instance. 
[ 1530.436403] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.436849] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.495647] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.600247] env[63371]: INFO nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 49.66 seconds to build instance. 
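[editor's note] The `Acquiring lock ... by ...` / `acquired ... waited N.NNNs` / `"released" ... held N.NNNs` triples seen here (and throughout the log, citing `inner lockutils.py:402/407/421`) are emitted by oslo.concurrency's lock wrapper around per-instance critical sections such as `_locked_do_build_and_run_instance`. A rough sketch of the usage pattern follows; it is illustrative only — the timing log lines are produced by the library's own wrapper, and `build_instance`/`terminate_instance` below are stand-ins, not Nova code.

```python
from oslo_concurrency import lockutils


@lockutils.synchronized("demo-instance-uuid")
def build_instance():
    # Runs under the named lock; concurrent callers block before entry,
    # which is where the "waited N.NNNs" figure in the log comes from.
    pass


def terminate_instance():
    # Equivalent inline form using the context manager.
    with lockutils.lock("demo-instance-uuid"):
        # Time spent in this block corresponds to ":: held N.NNNs".
        pass
```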
[ 1530.610447] env[63371]: DEBUG nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.610699] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.611321] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.611321] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.611321] env[63371]: DEBUG nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] No waiting events found dispatching network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1530.611802] env[63371]: WARNING nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received unexpected event network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 for instance with vm_state building and task_state spawning. [ 1530.684139] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.684992] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df0ac45-7aa6-4ebf-9632-68bebf6dee64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.694792] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1530.694792] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474" [ 1530.694792] env[63371]: _type = "Task" [ 1530.694792] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.704925] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.758632] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.758869] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing instance network info cache due to event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1530.759206] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.759252] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.759404] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.784191] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Successfully updated port: dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.812079] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task, 'duration_secs': 0.895627} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.812376] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.812578] env[63371]: INFO nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1530.812748] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.816164] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304c5d62-3be8-48f0-93b6-c0c477b5f903 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.838285] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af11eea-4a91-4d72-88f3-76cf7e8e08db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.847303] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4219e446-75fd-4045-8921-18e6dfec71f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.887027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9ee2c6-3ab0-4231-b8b1-194c25ce81aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.896284] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52265e59-636d-4439-a0fd-8c75720a218a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.913923] env[63371]: DEBUG nova.compute.provider_tree [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.927279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.254s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.999667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.001268] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance network_info: |[{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1531.001268] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:60:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7958f5c-d0af-44e7-bbb2-e6fa265a6da3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1531.011217] env[63371]: DEBUG oslo.service.loopingcall [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.011462] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1531.011685] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2b90798-7a80-4594-8fa4-9e623f36fb44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.034230] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1531.034230] env[63371]: value = "task-1774104" [ 1531.034230] env[63371]: _type = "Task" [ 1531.034230] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.043079] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.103047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.993s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.210677] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.211088] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.211370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.211776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33f71057-f921-4498-9167-2dbbb77cd9f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.219147] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1531.219147] env[63371]: value = "task-1774105" [ 1531.219147] env[63371]: _type = "Task" [ 1531.219147] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.226977] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.286508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.286699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.286890] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.341860] env[63371]: INFO nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 47.72 seconds to build instance. [ 1531.416717] env[63371]: DEBUG nova.scheduler.client.report [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1531.430693] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.546192] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.606516] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.728877] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497648} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.731211] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.731455] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.731733] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b1744d3-c69d-49d3-98ef-9ba614d6066e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.735209] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated VIF entry in instance network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1531.735286] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.738237] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1531.738237] env[63371]: value = 
"task-1774106" [ 1531.738237] env[63371]: _type = "Task" [ 1531.738237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.746055] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774106, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.815497] env[63371]: INFO nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Terminating instance [ 1531.818707] env[63371]: DEBUG nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.819053] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.819976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62700eff-ddeb-432f-8f67-1b501881b2c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.828586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.829205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c45d8c2-8820-4f93-81e0-d6ae0dd75739 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.837191] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.838502] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1531.838502] env[63371]: value = "task-1774107" [ 1531.838502] env[63371]: _type = "Task" [ 1531.838502] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.848696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.066s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.849166] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774107, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.925274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.179s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.927270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.176s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.929128] env[63371]: INFO nova.compute.claims [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1531.948697] env[63371]: INFO nova.scheduler.client.report [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance 36b81143-211f-4c77-854b-abe0d3f39ce4 [ 1531.962615] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.021385] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1532.048028] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task, 'duration_secs': 0.522182} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.048028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1532.048746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.048854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.049951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1532.049951] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3fabe0-ab7c-4e72-a131-9acb9d91f534 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.054727] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1532.054727] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15" [ 1532.054727] env[63371]: _type = "Task" [ 1532.054727] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.063807] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.146415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.237433] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.237793] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-vif-deleted-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.237885] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received event network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.238084] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.238306] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.238474] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.238632] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] No waiting events found dispatching network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1532.238825] env[63371]: WARNING nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received unexpected event network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 for instance with vm_state building and task_state spawning. 
[ 1532.238986] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received event network-changed-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.239209] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Refreshing instance network info cache due to event network-changed-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.239330] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.239466] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.239617] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Refreshing network info cache for port f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.250561] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072946} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.250856] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.251630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f70c889-3fd0-4bd8-b843-c0b93b5eaa7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.276379] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.276960] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b8b27bc-cb2b-476c-9885-4cf2c5bd4317 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.296564] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1532.296564] env[63371]: value = "task-1774108" [ 1532.296564] env[63371]: _type = "Task" [ 1532.296564] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.305577] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.348358] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774107, 'name': PowerOffVM_Task, 'duration_secs': 0.286782} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.348634] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1532.348833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1532.349116] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c6ae630-5fc9-4b54-9569-9bfab712e9f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.352187] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1532.456797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.902s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.480583] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1532.480862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1532.481044] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Deleting the datastore file [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1532.481305] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c96bbfbf-a248-4e09-8ee5-9866afffbd26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.488853] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: 
(returnval){ [ 1532.488853] env[63371]: value = "task-1774110" [ 1532.488853] env[63371]: _type = "Task" [ 1532.488853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.496968] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.524300] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.524562] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance network_info: |[{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1532.524973] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b8:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc1a6185-a139-4788-bbd2-d5540dd42733', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.532637] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating folder: Project (3fe8537857034ada970b516fcf2fce57). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.532909] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-809c908a-6c5f-4930-aaee-05923eb0b58f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.543495] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created folder: Project (3fe8537857034ada970b516fcf2fce57) in parent group-v368199. [ 1532.543675] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating folder: Instances. Parent ref: group-v368359. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.543910] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2d50a30-25b2-4b32-8a3a-e3804fca4624 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.553213] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created folder: Instances in parent group-v368359. [ 1532.553491] env[63371]: DEBUG oslo.service.loopingcall [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.553694] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.553938] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5930d71-9d18-47ee-96dd-27d319d61dd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.580207] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15, 'name': SearchDatastore_Task, 'duration_secs': 0.012336} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.581519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.581762] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1532.581994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.582161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.582335] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.582568] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.582568] env[63371]: value = "task-1774113" [ 1532.582568] env[63371]: _type = "Task" [ 1532.582568] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.582742] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c5bcc0c-a561-48b3-b5db-2573de0810d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.592662] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.594029] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.594029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1532.594677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8549fa25-26c3-42e2-9082-ce545ebfced4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.599670] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1532.599670] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15" [ 1532.599670] env[63371]: _type = "Task" [ 1532.599670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.607942] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.631742] env[63371]: INFO nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Rebuilding instance [ 1532.676303] env[63371]: DEBUG nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1532.676525] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685d9f79-cc20-4a26-bb64-774e95cdb1aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.715434] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-changed-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.715644] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Refreshing instance network info cache due to event network-changed-dc1a6185-a139-4788-bbd2-d5540dd42733. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.716088] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquiring lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.716088] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquired lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.716216] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Refreshing network info cache for port dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.811880] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.882214] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.013444] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14331} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1533.013444] env[63371]: INFO nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1533.013444] env[63371]: DEBUG oslo.service.loopingcall [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1533.013444] env[63371]: DEBUG nova.compute.manager [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1533.013444] env[63371]: DEBUG nova.network.neutron [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1533.052813] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updated VIF entry in instance network info cache for port f7958f5c-d0af-44e7-bbb2-e6fa265a6da3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.052813] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.098903] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.114059] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15, 'name': SearchDatastore_Task, 'duration_secs': 0.009133} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.114888] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-649afccd-c29d-425b-bd1f-7c4d33e9d4e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.122946] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1533.122946] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7" [ 1533.122946] env[63371]: _type = "Task" [ 1533.122946] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.132204] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.187608] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1533.191403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67fa86c4-8196-43c7-9f6b-fa1838db2d04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.199723] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1533.199723] env[63371]: value = "task-1774114" [ 1533.199723] env[63371]: _type = "Task" [ 1533.199723] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.213063] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.311724] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task, 'duration_secs': 0.686826} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.311954] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1533.312620] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a03479b-ed88-4b85-bb78-fa6eea70ed76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.325206] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1533.325206] env[63371]: value = "task-1774115" [ 1533.325206] env[63371]: _type = "Task" [ 1533.325206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.335923] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774115, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.427999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b4ad38-eb86-4ab4-a530-9554a180af29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.442502] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531fc2b3-d088-429c-8cc8-f659d246986c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.477760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8413822-50a4-4c92-a7b4-d9d61f5a39e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.488040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927f0121-76f7-4671-8b93-a7f0a6d7349d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.509573] env[63371]: DEBUG nova.compute.provider_tree [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1533.541034] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updated VIF entry in instance network info cache for port dc1a6185-a139-4788-bbd2-d5540dd42733. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.541156] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.556885] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.595704] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task, 'duration_secs': 0.655521} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.595880] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.596754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.596984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.597341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.597638] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed72527-067b-47a9-9842-ac6035c2022a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.602188] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1533.602188] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca" [ 1533.602188] env[63371]: _type = "Task" [ 1533.602188] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.609703] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.615554] env[63371]: DEBUG nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-vif-deleted-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.615741] env[63371]: INFO nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Neutron deleted interface d11a5154-6b30-4190-925a-4a07bc31709e; detaching it from the instance and deleting it from the info cache [ 1533.615905] env[63371]: DEBUG nova.network.neutron [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.634307] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7, 'name': SearchDatastore_Task, 'duration_secs': 0.009419} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.634531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.634817] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1533.635103] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0627af8f-728c-4540-a351-0a0ae87796ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.642697] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1533.642697] env[63371]: value = "task-1774116" [ 1533.642697] env[63371]: _type = "Task" [ 1533.642697] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.651489] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.709671] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774114, 'name': PowerOffVM_Task, 'duration_secs': 0.195239} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.710337] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1533.710337] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1533.710969] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac0e34f-3eea-426d-99aa-f0be8aa101e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.717622] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1533.717853] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc5a9a0c-8190-4ca1-9b24-86123a98e099 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.791875] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1533.792198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1533.792441] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1 {{(pid=63371) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1533.792738] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fdc9ace-bdaf-4aa3-be10-319f234c56ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.799768] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1533.799768] env[63371]: value = "task-1774118" [ 1533.799768] env[63371]: _type = "Task" [ 1533.799768] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.808601] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.834764] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774115, 'name': Rename_Task, 'duration_secs': 0.202837} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.835120] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.835393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d514c97-d3c5-458f-8bb6-154a42a2a013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.842686] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1533.842686] env[63371]: value = "task-1774119" [ 1533.842686] env[63371]: _type = "Task" [ 1533.842686] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.851727] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.936468] env[63371]: DEBUG nova.network.neutron [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.020324] env[63371]: DEBUG nova.scheduler.client.report [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1534.044283] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Releasing lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.044518] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.044663] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing instance network info cache due to event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1534.045089] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.045089] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.045301] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1534.120136] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca, 'name': SearchDatastore_Task, 'duration_secs': 0.009772} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.120136] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-daf0f2c6-7d27-485e-9abb-c0c926c99e40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.121776] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.122026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.122255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.122392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.122559] env[63371]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.122811] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4de41325-1d7d-489b-8c0d-2cff9a811639 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.132019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2672b46-209d-4371-9bce-b76eeb80f028 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.144568] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.145729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.149026] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b6a0a50-6944-461c-9fe0-447001a43017 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.156258] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468693} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.168685] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.168928] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.169318] env[63371]: DEBUG nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Detach interface failed, port_id=d11a5154-6b30-4190-925a-4a07bc31709e, reason: Instance 0e2c8ced-198f-43be-9d41-703a7c590df4 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1534.169778] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.169778] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52886e6c-f58a-edcc-d930-03a8683298ee" [ 1534.169778] env[63371]: _type = "Task" [ 1534.169778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.169973] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aae3e331-3d85-4ae1-b33a-595aa2ae758f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.182291] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52886e6c-f58a-edcc-d930-03a8683298ee, 'name': SearchDatastore_Task, 'duration_secs': 0.01014} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.184034] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1534.184034] env[63371]: value = "task-1774120" [ 1534.184034] env[63371]: _type = "Task" [ 1534.184034] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.184245] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ed4e2e-23f2-4231-b377-a2ef85ea8d83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.191911] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.191911] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b898c5-abc0-2dea-a202-a0cb095ce410" [ 1534.191911] env[63371]: _type = "Task" [ 1534.191911] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.195314] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.203134] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b898c5-abc0-2dea-a202-a0cb095ce410, 'name': SearchDatastore_Task, 'duration_secs': 0.008783} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.203398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.203650] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.204107] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbb4ebe8-771f-4873-936f-5e4637500ab0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.210670] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.210670] env[63371]: value = "task-1774121" [ 1534.210670] env[63371]: _type = "Task" [ 1534.210670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.219074] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.309839] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.412171} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.310129] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1534.310349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1534.310482] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1534.362695] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.440050] env[63371]: INFO nova.compute.manager [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 1.43 seconds to deallocate network for instance. [ 1534.531353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.532026] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1534.537558] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 30.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.696651] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.696882] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.697984] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee7de4-b741-42ab-ab2c-d10a2034b421 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.725393] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.734480] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5db3f33-992c-4310-a5b5-5240c3097655 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.757068] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.758857] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.759176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.759955] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1534.759955] env[63371]: value = "task-1774122" [ 1534.759955] env[63371]: _type = "Task" [ 1534.759955] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.760224] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c96f4941-029e-476f-b0f6-c7914d3dc308 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.772435] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.773771] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.773771] env[63371]: value = "task-1774123" [ 1534.773771] env[63371]: _type = "Task" [ 1534.773771] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.782538] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.858121] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task, 'duration_secs': 0.542429} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.858611] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1534.858964] env[63371]: INFO nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 10.62 seconds to spawn the instance on the hypervisor. 
[ 1534.859289] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1534.861127] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299f7e8a-7b30-432f-aef5-d75eab735331 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.914610] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updated VIF entry in instance network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1534.915177] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.946390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.044018] env[63371]: DEBUG nova.compute.utils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1535.046414] env[63371]: INFO nova.compute.claims [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 
96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1535.050143] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1535.050143] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1535.115386] env[63371]: DEBUG nova.policy [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1535.272647] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.283024] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.283682] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1535.284178] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5515c089-1600-4491-b797-d5a7656535fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.312081] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1535.312423] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-112c2c8f-0c36-4075-a8f9-f018f1467742 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.338658] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1535.338658] env[63371]: value = "task-1774124" [ 1535.338658] env[63371]: _type = "Task" [ 1535.338658] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.347667] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.363816] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1535.364068] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1535.364227] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1535.364402] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1535.364540] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1535.364681] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1535.364881] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1535.369410] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1535.369691] 
env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1535.369874] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1535.370086] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1535.371018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dd244f-ba12-451a-8e64-cfe864d42430 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.385618] env[63371]: INFO nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 47.26 seconds to build instance. [ 1535.387595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3165c5dc-6355-4d35-acd9-41f0cb60d854 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.405189] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:61:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12bfc72d-5ca7-4f11-8259-77887b5af47c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1535.412774] env[63371]: DEBUG oslo.service.loopingcall [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1535.413756] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1535.413998] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01638d82-cbf9-44aa-b3b4-04865c3e41a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.429260] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.436976] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1535.436976] env[63371]: value = "task-1774125" [ 1535.436976] env[63371]: _type = "Task" [ 1535.436976] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.443446] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774125, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.479728] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Successfully created port: d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1535.551333] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1535.560268] env[63371]: INFO nova.compute.resource_tracker [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating resource usage from migration 0d1a44d4-1ccf-4ed5-a60b-ac0e82931d09 [ 1535.774733] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task, 'duration_secs': 0.969393} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.775183] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1535.775664] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f2c3c69-8530-4d10-b492-8db7f00ddeb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.781430] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1535.781430] env[63371]: value = "task-1774126" [ 1535.781430] env[63371]: _type = "Task" [ 1535.781430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.789460] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774126, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.852215] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.893170] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.039s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.948321] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774125, 'name': CreateVM_Task, 'duration_secs': 0.343543} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.950764] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1535.951861] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.952873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.952873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1535.952873] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58304dd6-87dc-4e3f-9eab-04307766c671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.957968] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1535.957968] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528e388e-9283-6131-6da8-32ec00f7061e" [ 1535.957968] env[63371]: _type = "Task" [ 1535.957968] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.970075] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e388e-9283-6131-6da8-32ec00f7061e, 'name': SearchDatastore_Task, 'duration_secs': 0.008953} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.970477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.970559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1535.970782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.973211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.973211] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.973211] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00aa3121-9cc8-45fa-b339-d6940110fc08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.978914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.980293] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1535.980293] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40fef0f1-a5c2-4fbb-842f-c6b0df6fe489 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.986816] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1535.986816] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528ef330-659b-114d-43aa-5ae4f31fb44b" [ 1535.986816] env[63371]: _type = "Task" [ 1535.986816] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.989119] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42107181-9fe2-4e33-9505-b00eb2d9ddcb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.996422] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528ef330-659b-114d-43aa-5ae4f31fb44b, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.998740] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c99d0a8b-0df9-4066-a9c9-0519afc74921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.001845] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c6f5f7-770e-4e19-9b61-e05aac6a4066 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.007849] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.007849] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c99632-d529-a0d2-d559-d322e89317d8" [ 1536.007849] env[63371]: _type = "Task" [ 1536.007849] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.035748] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f3e366-0a73-416d-b6e3-ca3a27952033 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.045846] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c99632-d529-a0d2-d559-d322e89317d8, 'name': SearchDatastore_Task, 'duration_secs': 0.00847} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.046389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.046738] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1536.048153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c94209-c881-4ec4-92da-2e42b5985339 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.053353] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-784cdd07-70b7-4c15-a648-a28547329e74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.069482] env[63371]: DEBUG nova.compute.provider_tree [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.071861] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.071861] env[63371]: value = "task-1774127" [ 1536.071861] env[63371]: _type = "Task" [ 1536.071861] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.081889] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.292875] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774126, 'name': Rename_Task, 'duration_secs': 0.297282} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.293224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.293486] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e09bb18-55e3-48a7-a97a-cfa47f6a7390 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.300583] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1536.300583] env[63371]: value = "task-1774128" [ 1536.300583] env[63371]: _type = "Task" [ 1536.300583] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.313628] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.352053] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task, 'duration_secs': 0.569045} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.352426] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1536.353328] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d50c43f7-3d0f-4ef1-82c7-0ff797c6cf1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.361267] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1536.361267] env[63371]: value = "task-1774129" [ 1536.361267] env[63371]: _type = "Task" [ 1536.361267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.374385] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774129, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.397172] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1536.574079] env[63371]: DEBUG nova.scheduler.client.report [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.579692] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1536.601924] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532993} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.602289] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.602511] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.605723] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9635367a-1640-4a35-a53c-85f10971897a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.611462] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.611462] env[63371]: value = "task-1774130" [ 1536.611462] env[63371]: _type = "Task" [ 1536.611462] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.616589] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1536.616816] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1536.616969] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.617195] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1536.617343] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.617488] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1536.617691] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1536.617842] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1536.618011] env[63371]: DEBUG 
nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1536.618179] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1536.618345] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1536.619521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d98f21-9ad2-4c88-a8bb-3646645da778 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.628515] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.631666] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f07be2-69ba-4e97-8d1d-26c775b96a43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.812018] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.871311] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774129, 'name': Rename_Task, 'duration_secs': 0.304137} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.871690] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.871926] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3dd5803-887f-4e57-a80b-397bdb571a03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.879582] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1536.879582] env[63371]: value = "task-1774131" [ 1536.879582] env[63371]: _type = "Task" [ 1536.879582] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.888709] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.923637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.023069] env[63371]: DEBUG nova.compute.manager [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.023317] env[63371]: DEBUG nova.compute.manager [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing instance network info cache due to event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1537.023559] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.023769] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.023878] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.091028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.551s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.091028] env[63371]: INFO nova.compute.manager [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Migrating [ 1537.098238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.246s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.098715] env[63371]: DEBUG nova.objects.instance [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lazy-loading 'resources' on Instance uuid f8119ade-7018-4ad8-82fe-baa0a6753c64 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.127269] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062466} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.128164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1537.129731] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c2cde4-6206-4f0e-8844-35e2e59f68c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.161085] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.162433] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8595e640-e338-4caf-9a2a-8681e2ed6e52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.185470] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1537.185470] env[63371]: value = "task-1774132" [ 1537.185470] env[63371]: _type = "Task" [ 1537.185470] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.189554] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Successfully updated port: d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1537.196276] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774132, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.311014] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task, 'duration_secs': 0.838877} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.311802] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1537.311802] env[63371]: INFO nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 10.40 seconds to spawn the instance on the hypervisor. [ 1537.311802] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1537.312689] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7148668-9aa6-4982-a4b2-ac7785c79d4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.394030] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.612693] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.612892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.613093] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.695732] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774132, 'name': ReconfigVM_Task, 'duration_secs': 0.319853} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.700226] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.701179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.701310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.701448] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.703028] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d5d6b38-25f9-498c-ac44-3e5946935cd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.709694] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1537.709694] env[63371]: value = "task-1774133" [ 1537.709694] env[63371]: _type = "Task" [ 1537.709694] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.721093] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774133, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.810516] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updated VIF entry in instance network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1537.810899] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.829373] env[63371]: INFO nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 46.78 seconds to build instance. [ 1537.889670] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task, 'duration_secs': 0.624459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.893653] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1537.893653] env[63371]: INFO nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 8.27 seconds to spawn the instance on the hypervisor. 
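The "Updating instance_info_cache with network_info" entries above dump each instance's VIF list as JSON, including the port's MAC, fixed IP, and any attached floating IPs. The snippet below is an illustrative sketch, not taken from the log or from nova's code, of walking that structure; the literal is trimmed from the cache entry for port cf8050ea-381c... shown above, and real entries carry many more keys (bridge, mtu, OVS details, and so on).

import json

network_info = json.loads('''
[{"id": "cf8050ea-381c-487b-9981-c3f042d673e1",
  "address": "fa:16:3e:86:04:09",
  "network": {"label": "tempest-AttachVolumeNegativeTest-1211308086-network",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.5", "type": "fixed",
                                    "floating_ips": [{"address": "10.180.180.212",
                                                      "type": "floating"}]}]}]}}]
''')

# Print one line per fixed IP, with any floating IPs NATed to it.
for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip.get("floating_ips", [])]
            print("port", vif["id"], "mac", vif["address"],
                  "fixed", ip["address"], "floating", floating)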
[ 1537.893653] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1537.893653] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d9d4a4-e904-46d2-8b7f-00d2a3e834f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.009077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7137fa8a-b7ee-4dc2-a46e-5a52556e6221 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.016503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166855c3-7982-4f3d-81b5-7ddc13c7d14a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.046722] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e77b080-82e2-4f5c-8d64-64177352dfa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.054974] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87764988-ac9a-4da6-9f95-b427106a45cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.069964] env[63371]: DEBUG nova.compute.provider_tree [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.221774] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774133, 'name': Rename_Task, 'duration_secs': 0.135929} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.222137] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.222754] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e065fd82-de84-43e6-84be-1b13ef3408eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.228708] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1538.228708] env[63371]: value = "task-1774134" [ 1538.228708] env[63371]: _type = "Task" [ 1538.228708] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.237186] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1538.243976] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.302210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.302341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.302503] env[63371]: DEBUG nova.network.neutron [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.315536] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.333595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.056s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.382777] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.410064] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.410814] env[63371]: INFO nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 38.18 seconds to build instance. 
[ 1538.573113] env[63371]: DEBUG nova.scheduler.client.report [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1538.738980] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774134, 'name': PowerOnVM_Task, 'duration_secs': 0.467099} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.739488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.739809] env[63371]: DEBUG nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.740717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326ffabc-9943-44ca-a388-67000eb6beb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.836352] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1538.886720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.913083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.913420] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance network_info: |[{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1538.913866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.691s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.914184] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:85:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd233c17c-a3d0-4e06-8087-721a7808298d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1538.921886] env[63371]: DEBUG 
oslo.service.loopingcall [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1538.924444] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1538.924693] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4db4ca4-8aea-4794-a3a5-cbb3a342f356 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.948390] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1538.948390] env[63371]: value = "task-1774135" [ 1538.948390] env[63371]: _type = "Task" [ 1538.948390] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.956197] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.074614] env[63371]: DEBUG nova.network.neutron [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.078521] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.080267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.885s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.082097] env[63371]: INFO nova.compute.claims [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.102187] env[63371]: INFO nova.scheduler.client.report [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted allocations for instance f8119ade-7018-4ad8-82fe-baa0a6753c64 [ 1539.122157] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received event network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] No waiting events found dispatching network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1539.122706] env[63371]: WARNING nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received unexpected event network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d for instance with vm_state building and task_state spawning. 
[ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received event network-changed-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Refreshing instance network info cache due to event network-changed-d233c17c-a3d0-4e06-8087-721a7808298d. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1539.122882] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquiring lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.123264] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquired lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.125718] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Refreshing network info cache for port d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1539.257818] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.356078] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.459278] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.576986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.577204] env[63371]: DEBUG nova.compute.manager [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Inject network info {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1539.577461] env[63371]: DEBUG nova.compute.manager [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] network_info to inject: |[{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1539.582911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfiguring VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1539.583748] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e9037c3-7acc-43d0-b48e-71bda53ea7db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.604641] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Waiting for the task: (returnval){ [ 1539.604641] env[63371]: value = "task-1774136" [ 1539.604641] env[63371]: _type = "Task" [ 1539.604641] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.617960] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Task: {'id': task-1774136, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.618658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.601s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.737438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.737741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.737955] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.738162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.738331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.742374] env[63371]: INFO nova.compute.manager [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Terminating instance [ 1539.746201] env[63371]: DEBUG nova.compute.manager 
[None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1539.746370] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1539.747249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c08917-09c4-4b89-b4de-78c470a39eab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.757143] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1539.757399] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6683cc49-236f-4403-a4fe-6ac9aa2c2354 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.763573] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1539.763573] env[63371]: value = "task-1774137" [ 1539.763573] env[63371]: _type = "Task" [ 1539.763573] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.771902] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.944395] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updated VIF entry in instance network info cache for port d233c17c-a3d0-4e06-8087-721a7808298d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1539.944904] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.964241] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task, 'duration_secs': 0.990578} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.964507] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1539.966084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.966554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.966917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1539.967548] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffe9340-35fe-4746-9f18-b8c20bf88ac9 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.973168] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1539.973168] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a" [ 1539.973168] env[63371]: _type = "Task" [ 1539.973168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.981951] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.013583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.013846] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.014063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.014339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.014410] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.016559] env[63371]: INFO nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Terminating instance [ 1540.018489] env[63371]: DEBUG nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1540.018561] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1540.019427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11010db-6e37-4330-9003-c48682f7bb2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.027117] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.027370] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2997a741-d0f9-4beb-8a1f-c3c4d00ea682 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.033287] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1540.033287] env[63371]: value = "task-1774138" [ 1540.033287] env[63371]: _type = "Task" [ 1540.033287] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.042930] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774138, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.114183] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Task: {'id': task-1774136, 'name': ReconfigVM_Task, 'duration_secs': 0.251158} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.114470] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfigured VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1540.275203] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774137, 'name': PowerOffVM_Task, 'duration_secs': 0.256743} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.275528] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.275746] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.276018] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-418aa394-e8a6-42c2-89bf-070236a1d4ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Deleting the datastore file [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.353226] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a89f8511-c365-4d7d-a1f9-b5ea627f89cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.359169] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1540.359169] env[63371]: value = "task-1774140" [ 1540.359169] env[63371]: _type = 
"Task" [ 1540.359169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.368072] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.406103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f23d8ab-9a86-4e34-b694-9118ece1f9c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.430067] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1540.448500] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Releasing lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.487174] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.487174] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.487174] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1540.487174] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a88001c8-3785-4045-bb46-ad05a9ec5406 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.495843] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1540.496036] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1540.496776] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53c089bc-6b04-434a-82e6-98d5cebc5881 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.501631] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1540.501631] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf" [ 1540.501631] env[63371]: _type = "Task" [ 1540.501631] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.512462] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.541723] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774138, 'name': PowerOffVM_Task, 'duration_secs': 0.18904} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.544483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.544657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.545071] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81650a86-a120-4512-b759-fae844c3f024 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.566228] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f6b847-b367-4e27-8a13-53a654b028e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.576981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735a2dc8-d3f7-4c09-beb6-07ad32967c0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.610114] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71d4a2a-fcfc-4417-a9f7-19a7839b7c7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1540.618155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059915ec-98d3-41be-b4d8-d7074bcc70ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.624039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.624253] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.624425] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.624659] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06219820-e841-455b-8534-bc63ef6cde71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.633915] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1540.640435] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1540.640435] env[63371]: value = "task-1774142" [ 1540.640435] env[63371]: _type = "Task" [ 1540.640435] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.650126] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.828491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.828718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.870246] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.870519] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1540.870706] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1540.870889] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1540.871231] env[63371]: INFO nova.compute.manager [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1540.871377] env[63371]: DEBUG oslo.service.loopingcall [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.871537] env[63371]: DEBUG nova.compute.manager [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1540.871655] env[63371]: DEBUG nova.network.neutron [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1540.938122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.938762] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70e494ce-6bbc-4242-b0aa-3eced46f24f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.946680] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1540.946680] env[63371]: value = "task-1774143" [ 1540.946680] env[63371]: _type = "Task" [ 1540.946680] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.969659] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.013925] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf, 'name': SearchDatastore_Task, 'duration_secs': 0.012642} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.014787] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc0cfb16-fe47-4588-8cb0-a2d7ecace979 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.020088] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1541.020088] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11" [ 1541.020088] env[63371]: _type = "Task" [ 1541.020088] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.027985] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.148652] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168228} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.148909] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1541.149158] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1541.149367] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1541.149569] env[63371]: INFO nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1541.149867] env[63371]: DEBUG oslo.service.loopingcall [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1541.150110] env[63371]: DEBUG nova.compute.manager [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1541.150287] env[63371]: DEBUG nova.network.neutron [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1541.164374] env[63371]: ERROR nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [req-bb87a2c2-7514-4a8a-80b8-d3f5a414aa05] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb87a2c2-7514-4a8a-80b8-d3f5a414aa05"}]} [ 1541.191150] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1541.205387] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1541.205644] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1541.224590] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] 
Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1541.248461] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1541.330792] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1541.352216] env[63371]: DEBUG nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-vif-deleted-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.352216] env[63371]: INFO nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Neutron deleted interface dc1a6185-a139-4788-bbd2-d5540dd42733; detaching it from the instance and deleting it from the info cache [ 1541.352216] env[63371]: DEBUG nova.network.neutron [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.466361] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774143, 'name': PowerOffVM_Task, 'duration_secs': 0.274149} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.466361] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.466361] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1541.470721] env[63371]: INFO nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Rebuilding instance [ 1541.531242] env[63371]: DEBUG nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1541.532692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10c0d1c-6c1a-410c-a61c-f79153adfdfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.539191] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11, 'name': SearchDatastore_Task, 'duration_secs': 0.016247} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.539764] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.540023] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1541.540570] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d454ab1-b3ea-4b41-ae19-a0e0eecf8079 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.550134] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1541.550134] env[63371]: value = "task-1774144" [ 1541.550134] env[63371]: _type = "Task" [ 1541.550134] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.559669] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774144, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.588390] env[63371]: DEBUG nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-vif-deleted-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.588508] env[63371]: INFO nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Neutron deleted interface 12bfc72d-5ca7-4f11-8259-77887b5af47c; detaching it from the instance and deleting it from the info cache [ 1541.588677] env[63371]: DEBUG nova.network.neutron [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.742081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686a30b-f4a6-4316-879a-2d2bb301f7d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.751368] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbf7bf3-daf1-46fa-85b5-f33f9dc680c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.786418] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2667f74-13fb-4d4d-90bf-5ad04ba6c5ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.795085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935649d8-42af-43e3-8ee1-88b2352a9e5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.800194] env[63371]: DEBUG nova.network.neutron [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.813767] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.855808] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54030da2-0cde-4f89-ac01-2b5e4275ad10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.858486] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.867931] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b5907c-1020-4de3-a31c-f3f5cbee69af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.907811] env[63371]: DEBUG nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Detach interface failed, port_id=dc1a6185-a139-4788-bbd2-d5540dd42733, reason: Instance d6bc618e-33c9-4b45-b79f-afe6811acd4e could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1541.975583] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1541.975909] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1541.976138] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.976379] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1541.976594] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.976810] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1541.977088] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1541.977263] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1541.977430] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1541.977589] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1541.977755] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1541.983394] env[63371]: DEBUG nova.network.neutron [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.984697] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ecb91d4-05fc-494d-a181-c3927b165b61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.004398] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1542.004398] env[63371]: value = "task-1774145" [ 1542.004398] env[63371]: _type = "Task" [ 1542.004398] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.014500] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774145, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.048279] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1542.048631] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cbb2fe5-23da-4a87-bb71-db37bc5bfe7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.061506] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506425} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.063167] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1542.063393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1542.063704] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1542.063704] env[63371]: value = "task-1774146" [ 1542.063704] env[63371]: _type = "Task" [ 1542.063704] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.063893] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e717e50-4672-4368-9e67-3da30d2f1944 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.076124] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.077591] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1542.077591] env[63371]: value = "task-1774147" [ 1542.077591] env[63371]: _type = "Task" [ 1542.077591] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.096880] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83fa19a1-599f-4813-8755-1e529321a0d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.107638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aa1bc4-4548-4721-8d92-33034c4c9093 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.146011] env[63371]: DEBUG nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Detach interface failed, port_id=12bfc72d-5ca7-4f11-8259-77887b5af47c, reason: Instance 574121c4-c721-4d30-81ec-3f2310a7b6d1 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1542.303280] env[63371]: INFO nova.compute.manager [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 1.43 seconds to deallocate network for instance. [ 1542.317667] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1542.446533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.446859] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.497042] env[63371]: INFO nova.compute.manager [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 1.35 seconds to deallocate network for instance. [ 1542.515386] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774145, 'name': ReconfigVM_Task, 'duration_secs': 0.280393} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.515711] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1542.578135] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774146, 'name': PowerOffVM_Task, 'duration_secs': 0.289395} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.578228] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1542.578434] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1542.582511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1679b3d7-5cbe-4e72-a50f-992d75092101 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.590202] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1542.593438] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c66eae19-8852-4c3b-b0a8-18125cb0654b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.595224] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083383} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.595469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.596561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1e51f4-aac5-4f6f-b083-a91fa848f652 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.631301] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.632010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be78eb81-afbe-4a91-99a8-78df1e2a8654 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.653044] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1542.653044] env[63371]: value = "task-1774149" [ 1542.653044] env[63371]: _type = "Task" [ 1542.653044] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.661295] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774149, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.713450] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1542.713664] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1542.713896] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1542.714778] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c736f2b-fe86-49b2-95a4-3f078c597a12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.722585] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1542.722585] env[63371]: value = "task-1774150" [ 1542.722585] env[63371]: _type = "Task" [ 1542.722585] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.731387] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.810253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.826673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.746s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.827277] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1542.830127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.346s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.004127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.022258] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.022589] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.022809] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.023063] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.023293] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.023503] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.023764] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a 
tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.024113] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.024269] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.024495] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.024725] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.030204] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1543.030783] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b14b07d-2116-4c31-8553-80e45bda4fb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.054771] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1543.054771] env[63371]: value = "task-1774151" [ 1543.054771] env[63371]: _type = "Task" [ 1543.054771] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.066448] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.166297] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774149, 'name': ReconfigVM_Task, 'duration_secs': 0.319374} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.166593] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to attach disk [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1543.167259] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7fd7c98-b97f-4b05-97c8-d54780797350 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.178105] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1543.178105] env[63371]: value = "task-1774152" [ 1543.178105] env[63371]: _type = "Task" [ 1543.178105] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.191767] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.934177] env[63371]: DEBUG nova.compute.utils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.936089] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158891} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.938939] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1543.939093] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.946467] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1543.946665] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1543.946848] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1543.959659] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.960209] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.999158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1cd0e1-b300-4308-a781-9ff3df944977 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.003387] env[63371]: DEBUG nova.policy [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1544.009883] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754543e6-50fd-4b4d-94db-3469bb2b7c87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.042273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd394b80-2a7a-4cea-9ee3-996ce64a70d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.050718] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9c8c90-b3cc-4bbb-be63-ae53bb3a8a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.065152] env[63371]: DEBUG nova.compute.provider_tree [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.289226] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Successfully created port: bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1544.444876] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1544.458220] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task, 'duration_secs': 0.98931} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.459344] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.459619] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task, 'duration_secs': 1.356435} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.460023] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1e1d70b-6055-4a53-ab75-c00cfc786b5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.461695] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1544.463152] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf11542-7a19-42bc-8bd2-a1e7734c694a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.487223] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1544.488727] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab55d61-29c3-488e-9b57-cc0339062dd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.501519] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1544.501519] env[63371]: value = "task-1774153" [ 1544.501519] env[63371]: _type = "Task" [ 1544.501519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.510188] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1544.510188] env[63371]: value = "task-1774154" [ 1544.510188] env[63371]: _type = "Task" [ 1544.510188] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.524667] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774154, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.567979] env[63371]: DEBUG nova.scheduler.client.report [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1544.871229] env[63371]: INFO nova.compute.manager [None req-5e53a0f1-e096-4b70-87eb-4dc153c65ad4 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Get console output [ 1544.871616] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-5e53a0f1-e096-4b70-87eb-4dc153c65ad4 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] The console log is missing. Check your VSPC configuration [ 1544.989487] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.989773] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.989877] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.990063] env[63371]: DEBUG nova.virt.hardware [None 
req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.990231] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.990378] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.990578] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.990728] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.990883] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.991104] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.991287] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.992403] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8a9873-8ad4-48c0-b63c-2c0bff742447 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.001275] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de587a2-e122-4848-8a6c-ff7757421d2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.024157] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.031691] env[63371]: DEBUG oslo.service.loopingcall [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.031950] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774153, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.032503] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.033136] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479b7dd2-f67d-4787-bcfa-0a632d8e0b63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.050264] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774154, 'name': ReconfigVM_Task, 'duration_secs': 0.30551} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.050875] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1545.051193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1545.056513] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.056513] env[63371]: value = "task-1774155" [ 1545.056513] env[63371]: _type = "Task" [ 1545.056513] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.064889] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774155, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.074180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.074720] env[63371]: INFO nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully reverted task state from image_uploading on failure for instance. [ 1545.076710] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.157s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.076926] env[63371]: DEBUG nova.objects.instance [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'resources' on Instance uuid 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-368326' has already been deleted or has not been completely created [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: 
ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 
4443, in snapshot_instance [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 937, in _delete_vm_snapshot [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server self._session._wait_for_task(delete_snapshot_task) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return self.wait_for_task(task_ref) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server self.f(*self.args, **self.kw) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server raise exceptions.translate_fault(task_info.error) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-368326' has already been deleted or has not been completely created [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server [ 1545.453970] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1545.474553] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1545.474804] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1545.474955] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1545.475180] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1545.475334] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1545.475515] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1545.475721] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1545.475891] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1545.476098] env[63371]: 
DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1545.476276] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1545.476466] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1545.477428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f0e735-4e00-4fa8-96c1-09f295d2d5a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.489798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f706d366-9eed-4818-a9e9-c830fd4db0bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.514745] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774153, 'name': PowerOnVM_Task, 'duration_secs': 0.749811} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.515016] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.515216] env[63371]: INFO nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 8.94 seconds to spawn the instance on the hypervisor. 
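Most of the vCenter interactions above follow the same submit-then-poll pattern: an oslo.vmware session invokes a vSphere task (Rename_Task, PowerOnVM_Task, ReconfigVM_Task, CreateVM_Task) and wait_for_task() then polls it, emitting the "progress is N%" / "completed successfully" DEBUG lines or raising a translated fault such as the ManagedObjectNotFoundException in the snapshot traceback earlier in this section. A minimal sketch of that pattern follows; it assumes oslo.vmware's VMwareAPISession accepts (host, user, password, retry_count, task_poll_interval) positionally, and the vCenter host, credentials and VM reference are placeholders, not values from this log.

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf.
    session = api.VMwareAPISession(
        'vc.example.test',      # vCenter host (placeholder)
        'stack', 'secret',      # username / password (placeholders)
        10,                     # api_retry_count
        1.0)                    # task_poll_interval, in seconds

    # A vim.VirtualMachine managed object reference; Nova looks this up via
    # nova.virt.vmwareapi.vm_util.get_vm_ref(session, instance).
    vm_ref = ...

    try:
        # invoke_api() submits the vSphere task; wait_for_task() polls it,
        # producing the "progress is N%" / "completed successfully" entries.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)
    except vexc.ManagedObjectNotFoundException:
        # Raised when the VM vanished underneath the task, as with
        # 'vim.VirtualMachine:vm-368326' in the snapshot traceback above.
        pass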
[ 1545.515414] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1545.516148] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8eeffc0-a7b7-4b34-9951-f4e5e8a86086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.563740] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e856b3bd-7b66-4c98-ada3-985b66266385 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.573099] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774155, 'name': CreateVM_Task, 'duration_secs': 0.360208} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.586750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.590391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.590755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.590872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.591715] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89480b4b-4934-4aef-afe7-c85346896963 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.594340] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a950931b-ca9a-4e06-ac9d-df1c4c1480ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.614758] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1545.621584] 
env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1545.621584] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348" [ 1545.621584] env[63371]: _type = "Task" [ 1545.621584] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.632783] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.829933] env[63371]: DEBUG nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received event network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1545.830252] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.830461] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.831345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.831345] env[63371]: DEBUG nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] No waiting events found dispatching network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1545.831345] env[63371]: WARNING nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received unexpected event network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 for instance with vm_state building and task_state spawning. 
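The two lock message styles in this section come from oslo.concurrency's two interfaces: the "Acquiring lock" / "Acquired lock" / "Releasing lock" lines (lockutils.py:310/313/331) are the lock() context manager, while the 'acquired by "..." :: waited' / '"released" by "..." :: held' lines (lockutils.py:402/407/421) are the synchronized() decorator wrapper. Nova reaches these through its own wrappers (e.g. nova.utils.synchronized), but the underlying primitives look roughly like the sketch below; lock names are taken from this section and the bodies are elided.

    from oslo_concurrency import lockutils

    # Context-manager form: logs "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" (lockutils.py:310/313/331), e.g. the per-instance
    # "-events" lock and the datastore image-cache locks in this section.
    with lockutils.lock('704978f9-3b24-4a73-8f64-b8e3e9e94a04-events'):
        ...  # pop or register the instance event while holding the lock

    # Decorator form: logs 'Lock "..." acquired by "..." :: waited' and
    # '"released" by "..." :: held' (lockutils.py:402/407/421), as around
    # ResourceTracker.update_usage on the "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        ...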
[ 1545.918915] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Successfully updated port: bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.988799] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.989061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.989273] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.989455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.989629] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.992120] env[63371]: INFO nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Terminating instance [ 1545.995949] env[63371]: DEBUG nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1545.996084] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1545.996903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b74d3b-21f7-42dc-8452-3bd986a37e1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.007307] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1546.007550] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78e27e44-3e04-4af7-ab09-02c4d0f95ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.014161] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1546.014161] env[63371]: value = "task-1774156" [ 1546.014161] env[63371]: _type = "Task" [ 1546.014161] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.030762] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.034474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d726d2-4b09-4e3f-904e-6a59dd6c8522 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.037271] env[63371]: INFO nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 43.31 seconds to build instance. 
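The "Inventory has not changed for provider c079ebb1-… based on inventory data: {…}" entries (one appears just below) show the scheduler report client comparing the compute node's VCPU/MEMORY_MB/DISK_GB inventory against what the Placement service already holds and skipping the write. When the inventory does differ, the sync issues roughly the following PUT against the Placement API; this is a sketch, with the endpoint URL, token and resource_provider_generation as placeholders rather than values from this deployment.

    import requests

    PLACEMENT = 'http://placement.example.test/placement'  # placeholder endpoint
    TOKEN = 'gAAAA...'                                      # placeholder auth token
    PROVIDER = 'c079ebb1-2fa2-4df9-bdab-118e305653c1'       # resource provider UUID

    # Inventory payload as reported in the log for this compute node.
    inventories = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    resp = requests.put(
        f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.26'},
        # The generation must match Placement's current view of the provider;
        # a 409 response means a concurrent writer won and the sync retries.
        json={'resource_provider_generation': 0, 'inventories': inventories})
    resp.raise_for_status()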
[ 1546.043156] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db18365-3c6e-4208-b45f-4139d61a9056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.078615] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b49aa9a-3a36-4973-ac91-d78194ea0df3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.088021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eebe311-5228-4d05-bc23-de002059bec7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.104479] env[63371]: DEBUG nova.compute.provider_tree [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.136012] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348, 'name': SearchDatastore_Task, 'duration_secs': 0.035061} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.136351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.136574] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.136810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.136946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.137135] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.137400] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d1fe410-56a5-4e34-9252-ec1db441e451 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.148499] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.148684] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.149580] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3195bfd2-3010-42c9-aa9a-376fc9b295c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.157349] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.157349] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b" [ 1546.157349] env[63371]: _type = "Task" [ 1546.157349] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.168620] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.169902] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Port e13a7d6d-6643-4b64-a4b1-2a59397c5307 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1546.425362] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.425531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.425673] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1546.525958] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774156, 'name': PowerOffVM_Task, 'duration_secs': 0.185224} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.526308] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1546.526480] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1546.526738] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51dfb172-b8a6-4024-bce5-aff8f10287d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.539177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.819s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.607929] env[63371]: DEBUG nova.scheduler.client.report [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1546.640908] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1546.641274] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1546.641464] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleting the datastore file [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1546.641747] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6451dea1-ba95-42fb-a173-25ee843d0d37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.648991] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1546.648991] env[63371]: value = "task-1774158" [ 1546.648991] env[63371]: _type = "Task" [ 1546.648991] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.658067] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.667051] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01452} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.667857] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6bacf0d-25a8-4dea-9e1c-afc2c0d4cf9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.677819] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.677819] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527f2122-c754-7c27-d16f-6827bafd6c66" [ 1546.677819] env[63371]: _type = "Task" [ 1546.677819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.688210] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f2122-c754-7c27-d16f-6827bafd6c66, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.688505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.688772] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1546.689060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a774f536-6fa4-470e-bf70-597bd563d880 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.696537] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.696537] env[63371]: value = "task-1774159" [ 1546.696537] env[63371]: _type = "Task" [ 1546.696537] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.706456] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.966948] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1547.043467] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1547.113608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.116660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.434s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.116955] env[63371]: DEBUG nova.objects.instance [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lazy-loading 'resources' on Instance uuid 76c861a7-30f2-40f4-b723-7912975f36f8 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.140292] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updating instance_info_cache with network_info: [{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.145736] env[63371]: INFO nova.scheduler.client.report [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1547.163376] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143051} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.163741] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1547.163986] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1547.164243] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1547.164472] env[63371]: INFO nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1547.164789] env[63371]: DEBUG oslo.service.loopingcall [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.165063] env[63371]: DEBUG nova.compute.manager [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1547.165212] env[63371]: DEBUG nova.network.neutron [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1547.198858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.199255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.199822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.212132] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490394} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.212403] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1547.212617] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1547.213278] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31a724a2-9538-4b87-a215-92d43c6bc505 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.222080] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1547.222080] env[63371]: value = "task-1774160" [ 1547.222080] env[63371]: _type = "Task" [ 1547.222080] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.232733] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774160, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.567852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.646335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.646335] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance network_info: |[{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1547.649995] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:9a:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc802b6c-1a40-491b-8222-aa71e5d0bcd3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.658581] env[63371]: DEBUG oslo.service.loopingcall [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.660537] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1547.661082] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.264s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.661904] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecbd4afa-7bb8-49f5-ae25-d258e5326014 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.696062] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.696062] env[63371]: value = "task-1774161" [ 1547.696062] env[63371]: _type = "Task" [ 1547.696062] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.708118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.708387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.714833] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.732310] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073292} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.735309] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1547.736833] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1547ecb-9e01-45fc-b82f-ae9dcd04fffb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.759991] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1547.763229] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41442ded-6fe7-43e9-87e2-9b84758e451d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.786339] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1547.786339] env[63371]: value = "task-1774162" [ 1547.786339] env[63371]: _type = "Task" [ 1547.786339] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.802343] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.107197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a6eca4-d7a5-4edc-a0c6-86e401626099 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.112447] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received event network-changed-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.112627] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Refreshing instance network info cache due to event network-changed-bc802b6c-1a40-491b-8222-aa71e5d0bcd3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1548.112836] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Acquiring lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.112975] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Acquired lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.113152] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Refreshing network info cache for port bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1548.119965] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb56610-eae2-4f28-9da1-9fcbf5e744b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.151877] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cf6419-1b28-421f-82ec-29638d31c96b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.160010] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1152afc3-6fe9-4849-9cbd-48892bf9cf17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.175797] env[63371]: DEBUG nova.compute.provider_tree [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.189748] env[63371]: DEBUG nova.network.neutron [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.210676] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.216377] env[63371]: DEBUG nova.compute.utils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.271857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.271857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.271857] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.299315] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.679069] env[63371]: DEBUG nova.scheduler.client.report [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.692795] env[63371]: INFO nova.compute.manager [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 1.53 seconds to deallocate network for instance. [ 1548.712139] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task, 'duration_secs': 0.967257} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.712586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.713353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.713521] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.713786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.714064] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771fd3c5-695c-43e8-9a56-c9eedcb36fcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.719732] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.720910] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1548.720910] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50" [ 1548.720910] env[63371]: _type = "Task" [ 1548.720910] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.732582] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.797473] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task, 'duration_secs': 0.972661} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.797700] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.798358] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83c26eb6-ecc5-45d3-aa59-57033fce462f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.805821] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1548.805821] env[63371]: value = "task-1774163" [ 1548.805821] env[63371]: _type = "Task" [ 1548.805821] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.817436] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774163, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.903098] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updated VIF entry in instance network info cache for port bc802b6c-1a40-491b-8222-aa71e5d0bcd3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1548.903602] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updating instance_info_cache with network_info: [{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.987413] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.184455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.186712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.485s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.188220] env[63371]: INFO nova.compute.claims [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1549.205298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.209221] env[63371]: INFO nova.scheduler.client.report [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted allocations for instance 76c861a7-30f2-40f4-b723-7912975f36f8 [ 1549.232417] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50, 'name': SearchDatastore_Task, 'duration_secs': 0.015429} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.233039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.233137] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1549.233459] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.233655] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.233853] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.234886] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac4b50c2-e3c7-4570-a7eb-59e30ce3194c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.245843] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.246064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.246818] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8acf8e9b-38c1-4918-8807-1a14b57506c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.253763] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1549.253763] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2" [ 1549.253763] env[63371]: _type = "Task" [ 1549.253763] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.262747] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.321213] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774163, 'name': Rename_Task, 'duration_secs': 0.462631} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.321213] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.321213] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de83065a-3e37-455f-a66d-f2851d1341ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.328019] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1549.328019] env[63371]: value = "task-1774164" [ 1549.328019] env[63371]: _type = "Task" [ 1549.328019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.335222] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.406772] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Releasing lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.406772] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-vif-deleted-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.406772] env[63371]: INFO nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Neutron deleted interface a2807b8c-5895-474a-9c75-58bd21982409; detaching it from the instance and deleting it from the info cache [ 1549.406772] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.490027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.717297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.892s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.765585] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2, 'name': SearchDatastore_Task, 'duration_secs': 0.012472} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.766418] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5fae500-6d29-44ba-9c91-fb93046dc16c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.772498] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1549.772498] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381" [ 1549.772498] env[63371]: _type = "Task" [ 1549.772498] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.781044] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.784739] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.784973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.785215] env[63371]: INFO nova.compute.manager [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Attaching volume 31062dc6-9857-475c-b6b3-4e33c4ca4a59 to /dev/sdb [ 1549.816661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2774405a-22f4-4922-b6f1-36a6db059490 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.825024] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0480e6c3-547d-4b05-9d66-71e649c4996a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.838207] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.845032] env[63371]: DEBUG nova.virt.block_device [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating existing volume attachment record: 8b4518a1-5a2f-4731-8e95-cba3f5d3743c {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1549.909680] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a57dcaf0-671a-4e1d-b968-cf6891bfea46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.919669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26e2523-645d-4c8d-afeb-dcc24e50aee0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.953515] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Detach interface failed, port_id=a2807b8c-5895-474a-9c75-58bd21982409, reason: Instance 47c1c242-d190-4523-8033-307c5a9b7535 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1550.016423] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad3e544-6fcb-4fa8-8e9b-5df5a323e01a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.036705] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d458beab-3910-46ec-a03d-a34d1ad5d425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.043532] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1550.283396] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381, 'name': SearchDatastore_Task, 'duration_secs': 0.011795} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.287213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.287706] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1550.288343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0367c34-5dbd-468c-b123-a65394cce285 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.300152] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1550.300152] env[63371]: value = "task-1774168" [ 1550.300152] env[63371]: _type = "Task" [ 1550.300152] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.311557] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.341917] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task, 'duration_secs': 0.553615} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.342217] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.342428] env[63371]: DEBUG nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1550.343255] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a783730e-92ae-4f7f-ad49-a03c1ba0c164 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.551154] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.551856] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fe72bc8-acf2-4b86-86ae-f9a85127bd24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.562667] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1550.562667] env[63371]: value = "task-1774169" [ 1550.562667] env[63371]: _type = "Task" [ 1550.562667] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.576170] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.664973] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc4d22e-a6cf-459e-96c5-f3c3ae3d206e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.675402] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d794ee9f-6f8a-494b-a37b-f938bc176ad8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.714623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801acafc-bbe9-4596-a4dd-b8b3caee39be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.725019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629c915e-afe4-4382-86e6-8d53c028e3f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.746571] env[63371]: DEBUG nova.compute.provider_tree [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.810772] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498505} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.811093] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.811327] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.811668] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-181283ed-909e-4d40-a6bc-1ef927bc1f0e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.820712] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1550.820712] env[63371]: value = "task-1774170" [ 1550.820712] env[63371]: _type = "Task" [ 1550.820712] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.831189] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.863940] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.076693] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.250333] env[63371]: DEBUG nova.scheduler.client.report [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1551.266016] env[63371]: INFO nova.compute.manager [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Rebuilding instance [ 1551.325510] env[63371]: DEBUG nova.compute.manager [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1551.326292] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62d5e12-c85f-44c8-aaf5-39a9f6cb8a49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.338188] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071096} completed successfully. 
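The "Inventory has not changed for provider ..." report above lists the provider's inventory per resource class. Placement derives schedulable capacity from those figures as (total - reserved) * allocation_ratio, so the data shown amounts to roughly 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick illustrative calculation with the values copied from the log line:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400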
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.338624] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.339421] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d53dd4-07b5-4022-a599-8f32b096e3e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.365287] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.365931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-326d9f29-20da-4135-9859-1271684f356f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.392515] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1551.392515] env[63371]: value = "task-1774171" [ 1551.392515] env[63371]: _type = "Task" [ 1551.392515] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.405360] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.577844] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task, 'duration_secs': 0.546951} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.578146] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1551.578334] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1551.756624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.757226] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1551.761658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.789s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.764783] env[63371]: INFO nova.compute.claims [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1551.840354] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.840686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d86f54d-4fc6-4f7d-aa71-d8eb709dd057 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.848442] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1551.848442] env[63371]: value = "task-1774172" [ 1551.848442] env[63371]: _type = "Task" [ 1551.848442] env[63371]: } to complete. 
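The "compute_resources" lock lines above (acquired/released, with the waited and held durations) come from oslo.concurrency's lockutils, which the resource tracker uses to serialize claims against a compute node. A minimal sketch of that pattern, using the generic lockutils.synchronized decorator rather than Nova's own wrapper:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(context, instance, nodename):
        # Runs with the in-process "compute_resources" lock held; the
        # "waited N s" / "held N s" figures in the log are measured around
        # this critical section.
        ...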
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.858967] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.903337] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.272888] env[63371]: DEBUG nova.compute.utils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1552.275288] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1552.275640] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1552.363972] env[63371]: DEBUG nova.policy [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '205b9986577149cca5f5102f89f7283f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8973623e406e4ab699162499116ac8d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1552.365492] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774172, 'name': PowerOffVM_Task, 'duration_secs': 0.428093} completed successfully. 
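The "Policy check for network:attach_external_network failed" entry above is oslo.policy rejecting a non-admin request context (roles member/reader); port allocation then proceeds without external networks. A rough illustration using oslo.policy directly; the rule strings below are illustrative admin-only defaults, not necessarily Nova's exact policy definitions:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_defaults([
        policy.RuleDefault('context_is_admin', 'role:admin'),
        policy.RuleDefault('network:attach_external_network',
                           'rule:context_is_admin'),
    ])

    # Credentials shaped like the ones logged above: member/reader, not admin.
    creds = {'roles': ['member', 'reader'], 'is_admin': False,
             'project_id': '8973623e406e4ab699162499116ac8d1'}

    print(enforcer.enforce('network:attach_external_network', {}, creds))  # False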
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.365813] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.366123] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1552.367876] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125eafc3-6c4c-4e5a-919d-9e6d3d9d851e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.378384] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1552.379025] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b84743ce-f6fc-4365-8d58-5e5111ee9740 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.403325] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.684166] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Successfully created port: dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.780874] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1552.904841] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task, 'duration_secs': 1.096264} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.915825] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1552.915825] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a025b8-a466-47f2-9540-e7997e53c45b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.925991] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1552.925991] env[63371]: value = "task-1774175" [ 1552.925991] env[63371]: _type = "Task" [ 1552.925991] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.937024] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14c2022e-65e1-4ad0-86c4-63e9736a726f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.943252] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774175, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.945260] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1552.945260] env[63371]: value = "task-1774176" [ 1552.945260] env[63371]: _type = "Task" [ 1552.945260] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.961584] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.236131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869bc505-bdd6-45e2-b76e-5a60f4a8dab5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.244557] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f71ed35-9492-4559-a382-56468598e35e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.275679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0798b27-ef25-46ff-ac26-a52b662ae177 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.284021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aaf2d2-41d3-4a28-b4e3-c97d08518d8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.301418] env[63371]: DEBUG nova.compute.provider_tree [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.441567] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774175, 'name': Rename_Task, 'duration_secs': 0.243259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.441888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.442217] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5d2aeb6-d01d-4d22-9589-2a82532f3bd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.449149] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1553.449149] env[63371]: value = "task-1774177" [ 1553.449149] env[63371]: _type = "Task" [ 1553.449149] env[63371]: } to complete. 
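The rebuild path above tears the old VM down in three steps: PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory. A compressed sketch of that sequence as VIM calls through the session (again assuming an existing session and illustrative references, not Nova's vmops code):

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # Power off first, as the log does.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is a plain method, not a task, so there is no
        # wait_for_task here (matching the absence of a task id in the log).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Finally remove the instance directory, e.g.
        # "[datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60".
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)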
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.466354] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.468322] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.468587] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1553.468800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1553.469037] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1553.601793] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Port e13a7d6d-6643-4b64-a4b1-2a59397c5307 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1553.602208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.602483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.602483] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1553.792843] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea 
tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1553.805154] env[63371]: DEBUG nova.scheduler.client.report [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1553.821147] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.821416] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.821570] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.822110] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.822340] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.822501] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 
tempest-InstanceActionsV221TestJSON-2096132979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.822710] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.822867] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.823044] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.823216] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.823386] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.824283] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf98c642-640a-4358-9009-3093e0d3dcae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.833497] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55c8e71-a57a-4888-9b46-d931aa9a24dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.967175] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task} progress is 100%. 
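The hardware-topology block above reduces to a simple enumeration: with a 1-vCPU flavor, no flavor or image limits (hence the 65536 maxima) and no preferences, the only factorization of 1 into sockets x cores x threads is 1:1:1, which is why exactly one possible topology is reported. A back-of-the-envelope illustration of that enumeration (not nova.virt.hardware itself):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is vcpus,
        # bounded by the limits the log prints as 65536/65536/65536.
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"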
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.287262] env[63371]: DEBUG nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received event network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1554.287609] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.287935] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.288238] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.288537] env[63371]: DEBUG nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] No waiting events found dispatching network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1554.288833] env[63371]: WARNING nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received unexpected event network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f for instance with vm_state building and task_state spawning. [ 1554.310136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.310858] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1554.315718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.302s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.316507] env[63371]: DEBUG nova.objects.instance [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lazy-loading 'resources' on Instance uuid aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1554.384957] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.392262] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1554.392496] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1554.393399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4745c310-69f9-4295-9169-c05981fbec14 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.410967] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0851e4f9-8e49-4116-b4c2-5ac81817e799 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.414795] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Successfully updated port: dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.444882] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.445506] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e81cf2-21cf-4cda-97b5-a30981782b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.467774] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task, 'duration_secs': 0.587992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.469077] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.469291] env[63371]: INFO nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1554.469473] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1554.469805] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1554.469805] env[63371]: value = "task-1774178" [ 1554.469805] env[63371]: _type = "Task" [ 1554.469805] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.470511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ffb79e-0d4c-41cd-8f93-b33c51eb46d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.484112] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.506246] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1554.507466] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None 
req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1554.509335] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f6d336-2b5c-4eee-84ac-5fb2a805bc48 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.518501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503d78d2-d725-48f5-9b28-0bcb702d863e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.534056] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1554.541656] env[63371]: DEBUG oslo.service.loopingcall [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1554.542182] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1554.542412] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c318b557-7814-4fdd-864b-a38a5948bd93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.565604] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1554.565604] env[63371]: value = "task-1774179" [ 1554.565604] env[63371]: _type = "Task" [ 1554.565604] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.574473] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.822067] env[63371]: DEBUG nova.compute.utils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1554.823804] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1554.823990] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1554.876014] env[63371]: DEBUG nova.policy [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08b55f9fa3a45b0a8672e955ee360c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceecd2a995cf4da0b4218e371065ca0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1554.888109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.916639] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.916806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.916942] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.000235] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.005501] env[63371]: INFO nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 43.83 seconds to build instance. [ 1555.078630] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774179, 'name': CreateVM_Task} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.078852] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1555.079687] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.079853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.080181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1555.080485] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd1d4d3-56a6-4ee3-a3f1-9bb6f8e84c79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.087933] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1555.087933] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c" [ 1555.087933] env[63371]: _type = "Task" [ 1555.087933] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.097839] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.214805] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Successfully created port: 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1555.254016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54f2a11-e054-4cd7-8188-2537e60a05ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.261565] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7219ce9-d898-4369-8c02-5c2224d15254 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.297209] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9acae5-8679-4f16-b303-9ba99188ad93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.301045] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674c9bee-ed58-4232-9637-2dae60ea849f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.307836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1555.310467] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8920a70b-fbf3-4137-892f-d07d4e724f49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.313180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854c09cb-7dd6-4c0f-8825-09c0c43b3f7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.327427] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1555.330515] env[63371]: DEBUG nova.compute.provider_tree [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.333431] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1555.333431] env[63371]: value = "task-1774180" [ 1555.333431] env[63371]: _type = "Task" [ 1555.333431] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.343184] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.392209] env[63371]: DEBUG nova.compute.manager [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63371) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1555.392209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.484614] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task, 'duration_secs': 0.590201} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.484948] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.490042] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8166191b-5053-41af-8efe-46a261e16090 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.504578] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.507541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.346s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.510682] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1555.510682] env[63371]: value = "task-1774181" [ 1555.510682] env[63371]: _type = "Task" [ 1555.510682] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.520912] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.604657] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.015824} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.605285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.605285] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.605394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.606031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.606031] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.606031] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35eb6527-200a-4199-85dd-f343a3fde0d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.618832] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.619256] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.620335] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3ffb871-3d75-4c18-8f8c-e425b363518d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.630653] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1555.630653] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203" [ 1555.630653] env[63371]: _type = "Task" [ 1555.630653] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.647312] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.685742] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updating instance_info_cache with network_info: [{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.838428] env[63371]: DEBUG nova.scheduler.client.report [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.852161] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 87%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.021434] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.141767] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203, 'name': SearchDatastore_Task, 'duration_secs': 0.095956} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.142726] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73ecd81a-b760-42d0-ac82-79e6b3efd802 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.148302] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1556.148302] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085" [ 1556.148302] env[63371]: _type = "Task" [ 1556.148302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.156869] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.191274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.191751] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance network_info: |[{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1556.192272] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:5f:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbbac158-9444-441f-b15b-2a793507b64f', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.200864] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating folder: Project (8973623e406e4ab699162499116ac8d1). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.201164] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9843764-9154-46fb-bf47-0c98327d4074 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.214425] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created folder: Project (8973623e406e4ab699162499116ac8d1) in parent group-v368199. [ 1556.214781] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating folder: Instances. Parent ref: group-v368369. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.215176] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abc2b75b-cfaa-4171-809a-ca26ecc0161a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.227294] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created folder: Instances in parent group-v368369. [ 1556.230039] env[63371]: DEBUG oslo.service.loopingcall [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.230039] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.230039] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6db8288d-4389-4f5c-98be-b986675b3286 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.254396] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.254396] env[63371]: value = "task-1774184" [ 1556.254396] env[63371]: _type = "Task" [ 1556.254396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.262757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.314335] env[63371]: DEBUG nova.compute.manager [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received event network-changed-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1556.314554] env[63371]: DEBUG nova.compute.manager [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Refreshing instance network info cache due to event network-changed-dbbac158-9444-441f-b15b-2a793507b64f. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1556.314755] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Acquiring lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.314897] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Acquired lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.315069] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Refreshing network info cache for port dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.344151] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1556.354388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.356381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.240s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.356381] env[63371]: DEBUG nova.objects.instance [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lazy-loading 'resources' on Instance uuid e0369f27-68ea-49c4-8524-3dbbb3cde96e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1556.364619] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 87%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.376666] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1556.376943] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1556.377169] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1556.377390] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1556.377551] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1556.377703] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1556.377908] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1556.378102] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1556.378373] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1556.378582] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1556.378761] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1556.379707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3febc60d-bf37-49a7-885c-76a56ebd6425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.383127] env[63371]: INFO nova.scheduler.client.report [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleted allocations for instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf [ 1556.392195] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83c0c41-1b56-4537-9284-bdd786edb34e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.523496] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task, 'duration_secs': 0.538754} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.523781] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1556.659590] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085, 'name': SearchDatastore_Task, 'duration_secs': 0.043448} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.659854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.660162] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.660403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a55605a6-de03-42c9-83d4-8557c85285e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.669222] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1556.669222] env[63371]: value = "task-1774185" [ 1556.669222] env[63371]: _type = "Task" [ 1556.669222] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.679176] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.769016] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.809916] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Successfully updated port: 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.852691] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task, 'duration_secs': 1.508934} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.855086] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1556.855242] env[63371]: DEBUG nova.compute.manager [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1556.856069] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceb4570-88bc-4ab8-8b74-fe95a5c68db8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.897858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.500s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.076356] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updated VIF entry in instance network info cache for port dbbac158-9444-441f-b15b-2a793507b64f. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1557.076727] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updating instance_info_cache with network_info: [{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.183730] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.237946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c847b7a3-2907-44b5-8b61-3806766c4db0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.246189] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178d4e18-903b-4d1b-8b10-c84fec7e8dac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.279638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46571ed5-7bd5-4edb-a48d-ef653df92f52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.289847] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task, 'duration_secs': 0.854823} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.289904] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1557.291230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d584fa3-eef5-4752-8780-4195f07a8cc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.295386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.295597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.295865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1557.296125] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fade93bb-3ed4-41ca-9343-43a064eb1292 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.308758] env[63371]: DEBUG nova.compute.provider_tree [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.311078] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1557.311078] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a" [ 1557.311078] env[63371]: _type = "Task" [ 1557.311078] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.314646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.314760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.314906] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.322225] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.567634] env[63371]: DEBUG nova.objects.instance [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'flavor' on Instance uuid b880750e-7bf4-412c-bcff-eb2c343f60f0 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.579375] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Releasing lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.684051] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.812474] env[63371]: DEBUG nova.scheduler.client.report [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1557.830824] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a, 'name': SearchDatastore_Task, 'duration_secs': 0.049179} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.831234] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.831469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.831740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.832514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.832514] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.832916] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ed722591-03e5-42b5-80f2-3769beb0c6ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.845859] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.846686] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.847485] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0513607e-f48e-4045-90c3-5b67c7298452 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.854087] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1557.854087] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664" [ 1557.854087] env[63371]: _type = "Task" [ 1557.854087] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.864141] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.875327] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.038876] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.073199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.288s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.183330] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.178396} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.183590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1558.183805] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1558.184060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6d04a9c-fcfe-4384-aa09-de8d2083aea5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.191798] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1558.191798] env[63371]: value = "task-1774186" [ 1558.191798] env[63371]: _type = "Task" [ 1558.191798] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.203341] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774186, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.324116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.965s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.324116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.361s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.326978] env[63371]: INFO nova.compute.claims [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1558.347213] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.347213] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] No waiting events found dispatching network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1558.347733] env[63371]: WARNING nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received unexpected event network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a for instance with 
vm_state building and task_state spawning. [ 1558.348094] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.348409] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing instance network info cache due to event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1558.348707] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.351414] env[63371]: INFO nova.scheduler.client.report [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted allocations for instance e0369f27-68ea-49c4-8524-3dbbb3cde96e [ 1558.369452] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.372659] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664, 'name': SearchDatastore_Task, 'duration_secs': 0.03301} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.374648] env[63371]: INFO nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Terminating instance [ 1558.376112] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d01a29-defe-4a10-93a8-9c8d8f26549a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.380297] env[63371]: DEBUG nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1558.380605] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.380931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca402362-6a29-4a38-a9a1-9621472f00c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.394227] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1558.394227] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9" [ 1558.394227] env[63371]: _type = "Task" [ 1558.394227] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.394227] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1558.394227] env[63371]: value = "task-1774187" [ 1558.394227] env[63371]: _type = "Task" [ 1558.394227] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.410026] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.410880] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.542392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.546186] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance network_info: |[{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1558.546538] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.546724] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1558.548902] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:09:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82aece5e-dc40-4c18-a1a9-4b4e859fef2a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.557809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating folder: Project (ceecd2a995cf4da0b4218e371065ca0b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1558.558266] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d59fb624-e29e-4fc9-995e-ae9b081fec4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.572100] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created folder: Project (ceecd2a995cf4da0b4218e371065ca0b) in parent group-v368199. [ 1558.572407] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating folder: Instances. Parent ref: group-v368372. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1558.572568] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e96ed30-254e-43aa-aa3c-b772a1337c9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.587298] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created folder: Instances in parent group-v368372. [ 1558.587298] env[63371]: DEBUG oslo.service.loopingcall [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.587423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.587615] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3828701a-991f-4c12-8ad8-f2f735254f17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.611553] env[63371]: DEBUG nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.612473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3048daee-6f57-4102-b1b3-2d04fff8e91e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.619435] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.619435] env[63371]: value = "task-1774190" [ 1558.619435] env[63371]: _type = "Task" [ 1558.619435] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.632448] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.707498] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133602} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.707498] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.707723] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91484889-6f1d-4410-9d4a-494d6bbf3dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.735908] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.736304] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8d64909-8a29-4231-af99-31630355a1f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.760056] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1558.760056] env[63371]: value = "task-1774191" [ 1558.760056] env[63371]: _type = "Task" [ 1558.760056] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.765433] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.864384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.173s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.910944] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774187, 'name': PowerOffVM_Task, 'duration_secs': 0.365629} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.914528] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1558.914746] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1558.914941] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1558.915336] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9, 'name': SearchDatastore_Task, 'duration_secs': 0.022318} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.916059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7309679f-a4ce-4403-b444-ffb2ede76471 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.918496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.918752] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.918994] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f201be5e-035e-428d-97ae-66b5a3b206ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.947672] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195dc7e2-c470-4cef-94e7-0c891cea03dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.950819] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1558.950819] env[63371]: value = "task-1774192" [ 1558.950819] env[63371]: _type = "Task" [ 1558.950819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.956630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b0947a-9f89-4eb7-81fe-605c4a9124de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.962013] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.987362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a53d80-1279-44f0-9c46-ebdb1587ba40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.008827] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] The volume has not been displaced from its original location: [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1559.014078] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1559.014429] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97e928e6-ea1b-4a9e-a425-49c7be4f0817 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.034403] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1559.034403] env[63371]: value = "task-1774193" [ 1559.034403] env[63371]: _type = "Task" [ 1559.034403] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.045289] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.128288] env[63371]: INFO nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] instance snapshotting [ 1559.128509] env[63371]: WARNING nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1559.133411] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.134509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43778b0-18be-42c3-9c0a-6c185b4b46d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.153180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d44a661-c2fa-462b-9c15-81bac8fe5b81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.267177] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.473932] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.475340] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updated VIF entry in instance network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.475585] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.548197] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.636122] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.666421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1559.667100] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc312f12-22c4-49f5-9b8c-cfb14d1fbf81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.677497] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1559.677497] env[63371]: value = "task-1774194" [ 1559.677497] env[63371]: _type = "Task" [ 1559.677497] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.694132] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.773018] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.837245] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e872a07e-5e8b-4898-a425-1a5fe32e5edb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.851060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0543b89b-6cac-45b7-966f-c26a33106e29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.909403] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a897c7-858b-4b2a-b3db-cc3e5d1b7ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.919230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442cc1f-1b4b-45e8-a8ac-7b3e916b5b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.937718] env[63371]: DEBUG nova.compute.provider_tree [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.963792] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.004232} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.964134] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1559.964662] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1559.964662] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5642c582-483b-4da8-854a-3668d6424fdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.973677] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1559.973677] env[63371]: value = "task-1774195" [ 1559.973677] env[63371]: _type = "Task" [ 1559.973677] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.984409] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.985154] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.046787] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task, 'duration_secs': 0.981522} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.047434] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1560.054510] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8e5c34b-eb3c-4506-83dd-185c6188e659 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.070440] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1560.070440] env[63371]: value = "task-1774196" [ 1560.070440] env[63371]: _type = "Task" [ 1560.070440] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.080537] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.135965] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.190948] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.271610] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task, 'duration_secs': 1.394321} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.271903] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.272756] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e1272ca-837d-48c2-96a7-b416b0bb842f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.280305] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1560.280305] env[63371]: value = "task-1774197" [ 1560.280305] env[63371]: _type = "Task" [ 1560.280305] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.289490] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774197, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.440316] env[63371]: DEBUG nova.scheduler.client.report [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1560.484659] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176642} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.484936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1560.485999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b32a78d-51d0-4666-a1b7-7c08f19412e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.510160] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.510160] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dacc814a-c8ef-47e1-b1fe-0ea0df98780a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.532027] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1560.532027] env[63371]: value = "task-1774198" [ 1560.532027] env[63371]: _type = "Task" [ 1560.532027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.542111] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.580474] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774196, 'name': ReconfigVM_Task, 'duration_secs': 0.280192} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.580855] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1560.581240] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.582411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d345a7-f1d2-483c-a3cb-998851daec93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.590609] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.590866] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04bef8cf-c465-4482-841a-b8b0372c7179 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.637615] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.691987] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.792913] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774197, 'name': Rename_Task, 'duration_secs': 0.234603} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.793234] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1560.793570] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9a74c5a-258f-4d88-b35f-45b28923edae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.802103] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1560.802103] env[63371]: value = "task-1774200" [ 1560.802103] env[63371]: _type = "Task" [ 1560.802103] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.812317] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.947043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.947579] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1560.951476] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.805s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.952966] env[63371]: INFO nova.compute.claims [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1561.042996] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.133906] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.192387] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task, 'duration_secs': 1.220839} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.192688] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1561.193461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f0c373-b13e-4001-947d-e477efd88d8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.317996] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.458844] env[63371]: DEBUG nova.compute.utils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1561.462466] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1561.462618] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1561.543738] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.545225] env[63371]: DEBUG nova.policy [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1561.637771] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.712452] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1561.712685] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5e7592ed-b86a-41f2-baa3-5312a6d7111b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.722871] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 temp